diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 9e190d43b28ef9..0add48e777866c 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -5,11 +5,11 @@ # https://git-scm.com/docs/gitignore#_pattern_format # GitHub -.github/** @ezio-melotti @hugovk +.github/** @ezio-melotti @hugovk @AA-Turner # pre-commit .pre-commit-config.yaml @hugovk @AlexWaygood -.ruff.toml @hugovk @AlexWaygood +.ruff.toml @hugovk @AlexWaygood @AA-Turner # Build system configure* @erlend-aasland @corona10 @@ -56,6 +56,14 @@ Tools/c-analyzer/ @ericsnowcurrently # dbm **/*dbm* @corona10 @erlend-aasland @serhiy-storchaka +# Doc/ tools +Doc/conf.py @AA-Turner @hugovk +Doc/Makefile @AA-Turner @hugovk +Doc/make.bat @AA-Turner @hugovk +Doc/requirements.txt @AA-Turner @hugovk +Doc/_static/** @AA-Turner @hugovk +Doc/tools/** @AA-Turner @hugovk + # runtime state/lifecycle **/*pylifecycle* @ericsnowcurrently **/*pystate* @ericsnowcurrently @@ -96,13 +104,14 @@ Doc/library/site.rst @FFY00 Lib/test/test_except*.py @iritkatriel Objects/exceptions.c @iritkatriel -# Hashing -**/*hashlib* @gpshead @tiran +# Hashing & cryptographic primitives +**/*hashlib* @gpshead @tiran @picnixz **/*pyhash* @gpshead @tiran -**/sha* @gpshead @tiran -Modules/md5* @gpshead @tiran -**/*blake* @gpshead @tiran +**/sha* @gpshead @tiran @picnixz +Modules/md5* @gpshead @tiran @picnixz +**/*blake* @gpshead @tiran @picnixz Modules/_hacl/** @gpshead +**/*hmac* @gpshead @picnixz # logging **/*logging* @vsajip @@ -292,6 +301,10 @@ Lib/configparser.py @jaraco Lib/test/test_configparser.py @jaraco # Doc sections -Doc/reference/ @willingc +Doc/reference/ @willingc @AA-Turner **/*weakref* @kumaraditya303 + +# Colorize +Lib/_colorize.py @hugovk +Lib/test/test__colorize.py @hugovk diff --git a/.github/actionlint.yaml b/.github/actionlint.yaml index 3701f7297ffeb2..af125266ae7813 100644 --- a/.github/actionlint.yaml +++ b/.github/actionlint.yaml @@ -1,5 +1,5 @@ self-hosted-runner: - labels: ["ubuntu-24.04-aarch64", "windows-aarch64"] + labels: ["windows-aarch64"] config-variables: null @@ -7,4 +7,4 @@ paths: .github/workflows/**/*.yml: ignore: - 1st argument of function call is not assignable - - SC2(015|038|086|091|097|098|129|155) \ No newline at end of file + - SC2(015|038|086|091|097|098|129|155) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index aec50c5200d1c6..4d3dadf7d86845 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -18,6 +18,9 @@ concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}-reusable cancel-in-progress: true +env: + FORCE_COLOR: 1 + jobs: check_source: name: Change detection @@ -231,24 +234,31 @@ jobs: name: >- Ubuntu ${{ fromJSON(matrix.free-threading) && '(free-threading)' || '' }} + ${{ fromJSON(matrix.bolt) && '(bolt)' || '' }} needs: check_source if: needs.check_source.outputs.run_tests == 'true' strategy: matrix: + bolt: + - false + - true free-threading: - false - true os: - ubuntu-24.04 - - ubuntu-24.04-aarch64 - is-fork: # only used for the exclusion trick - - ${{ github.repository_owner != 'python' }} + - ubuntu-24.04-arm exclude: - - os: ubuntu-24.04-aarch64 - is-fork: true + # Do not test BOLT with free-threading, to conserve resources + - bolt: true + free-threading: true + # BOLT currently crashes during instrumentation on aarch64 + - os: ubuntu-24.04-arm + bolt: true uses: ./.github/workflows/reusable-ubuntu.yml with: config_hash: ${{ needs.check_source.outputs.config_hash }} + bolt-optimizations: ${{ matrix.bolt }} free-threading: ${{ 
matrix.free-threading }} os: ${{ matrix.os }} @@ -502,26 +512,59 @@ jobs: run: xvfb-run make ci build_tsan: - name: 'Thread sanitizer' + name: >- + Thread sanitizer + ${{ fromJSON(matrix.free-threading) && '(free-threading)' || '' }} needs: check_source if: needs.check_source.outputs.run_tests == 'true' + strategy: + matrix: + free-threading: + - false + - true uses: ./.github/workflows/reusable-tsan.yml with: config_hash: ${{ needs.check_source.outputs.config_hash }} - options: ./configure --config-cache --with-thread-sanitizer --with-pydebug - suppressions_path: Tools/tsan/supressions.txt - tsan_logs_artifact_name: tsan-logs-default + free-threading: ${{ matrix.free-threading }} - build_tsan_free_threading: - name: 'Thread sanitizer (free-threading)' + cross-build-linux: + name: Cross build Linux + runs-on: ubuntu-latest needs: check_source if: needs.check_source.outputs.run_tests == 'true' - uses: ./.github/workflows/reusable-tsan.yml - with: - config_hash: ${{ needs.check_source.outputs.config_hash }} - options: ./configure --config-cache --disable-gil --with-thread-sanitizer --with-pydebug - suppressions_path: Tools/tsan/suppressions_free_threading.txt - tsan_logs_artifact_name: tsan-logs-free-threading + steps: + - uses: actions/checkout@v4 + with: + persist-credentials: false + - name: Runner image version + run: echo "IMAGE_VERSION=${ImageVersion}" >> "$GITHUB_ENV" + - name: Restore config.cache + uses: actions/cache@v4 + with: + path: config.cache + key: ${{ github.job }}-${{ runner.os }}-${{ env.IMAGE_VERSION }}-${{ needs.check_source.outputs.config_hash }} + - name: Register gcc problem matcher + run: echo "::add-matcher::.github/problem-matchers/gcc.json" + - name: Set build dir + run: # an absolute path outside of the working directory echo "BUILD_DIR=$(realpath ${{ github.workspace }}/../build)" >> "$GITHUB_ENV" + - name: Install Dependencies + run: sudo ./.github/workflows/posix-deps-apt.sh + - name: Configure host build + run: ./configure --prefix="$BUILD_DIR/host-python" + - name: Install host Python + run: make -j8 install + - name: Run test subset with host build + run: | + "$BUILD_DIR/host-python/bin/python3" -m test test_sysconfig test_site test_embed + - name: Configure cross build + run: ./configure --prefix="$BUILD_DIR/cross-python" --with-build-python="$BUILD_DIR/host-python/bin/python3" + - name: Install cross Python + run: make -j8 install + - name: Run test subset with cross build + run: | + "$BUILD_DIR/cross-python/bin/python3" -m test test_sysconfig test_site test_embed # CIFuzz job based on https://google.github.io/oss-fuzz/getting-started/continuous-integration/ cifuzz: @@ -581,7 +624,6 @@ jobs: - test_hypothesis - build_asan - build_tsan - - build_tsan_free_threading - cifuzz runs-on: ubuntu-latest @@ -615,7 +657,6 @@ jobs: build_windows, build_asan, build_tsan, - build_tsan_free_threading, ' || '' }} diff --git a/.github/workflows/jit.yml b/.github/workflows/jit.yml index 9b84998a55666d..806a8524112d76 100644 --- a/.github/workflows/jit.yml +++ b/.github/workflows/jit.yml @@ -25,6 +25,9 @@ concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} cancel-in-progress: true +env: + FORCE_COLOR: 1 + jobs: interpreter: name: Interpreter (Debug) @@ -83,8 +86,7 @@ jobs: runner: ubuntu-24.04 - target: aarch64-unknown-linux-gnu/gcc architecture: aarch64 - # Forks don't have access to our paid AArch64 runners.
These jobs are skipped below: - runner: ${{ github.repository_owner == 'python' && 'ubuntu-24.04-aarch64' || 'ubuntu-24.04' }} + runner: ubuntu-24.04-arm steps: - uses: actions/checkout@v4 with: @@ -123,8 +125,7 @@ jobs: ./python.exe -m test --multiprocess 0 --timeout 4500 --verbose2 --verbose3 - name: Native Linux - # Forks don't have access to our paid AArch64 runners. Skip those: - if: runner.os == 'Linux' && (matrix.architecture == 'x86_64' || github.repository_owner == 'python') + if: runner.os == 'Linux' run: | sudo bash -c "$(wget -O - https://apt.llvm.org/llvm.sh)" ./llvm.sh ${{ matrix.llvm }} export PATH="$(llvm-config-${{ matrix.llvm }} --bindir):$PATH" diff --git a/.github/workflows/reusable-change-detection.yml b/.github/workflows/reusable-change-detection.yml index 133e9978181df2..6950b84461daf7 100644 --- a/.github/workflows/reusable-change-detection.yml +++ b/.github/workflows/reusable-change-detection.yml @@ -88,7 +88,22 @@ jobs: # into the PR branch anyway. # # https://github.com/python/core-workflow/issues/373 - git diff --name-only "origin/$GITHUB_BASE_REF.." | grep -qvE '(\.rst$|^Doc|^Misc|^\.pre-commit-config\.yaml$|\.ruff\.toml$|\.md$|mypy\.ini$)' && echo "run-tests=true" >> "$GITHUB_OUTPUT" || true + grep_ignore_args=( + # file extensions + -e '\.md$' + -e '\.rst$' + # top-level folders + -e '^Doc/' + -e '^Misc/' + # configuration files + -e '^\.github/CODEOWNERS$' + -e '^\.pre-commit-config\.yaml$' + -e '\.ruff\.toml$' + -e 'mypy\.ini$' + ) + git diff --name-only "origin/$GITHUB_BASE_REF.." \ + | grep -qvE "${grep_ignore_args[@]}" \ + && echo "run-tests=true" >> "$GITHUB_OUTPUT" || true fi # Check if we should run hypothesis tests @@ -120,8 +135,8 @@ jobs: if [ "$GITHUB_BASE_REF" = "main" ]; then CHANGED_FILES=$(git diff --name-only "origin/$GITHUB_BASE_REF..") # Check if changes are ONLY in configure/Makefile files - if echo "$CHANGED_FILES" | grep -qE '^(configure.*|Makefile.*|.*\.m4)$' && \ - ! echo "$CHANGED_FILES" | grep -qvE '^(configure.*|Makefile.*|.*\.m4)$'; then + if echo "$CHANGED_FILES" | grep -qE '^(configure.*|Makefile(\.pre)?\.in|Makefile|.*\.m4)$' && \ + ! 
echo "$CHANGED_FILES" | grep -qvE '^(configure.*|Makefile(\.pre)?\.in|Makefile|.*\.m4)$'; then # Only configure/Makefile files changed, skip Windows CI echo "run-windows=false" >> "$GITHUB_OUTPUT" else diff --git a/.github/workflows/reusable-docs.yml b/.github/workflows/reusable-docs.yml index 88da55bf08b8fe..6738acc98c6565 100644 --- a/.github/workflows/reusable-docs.yml +++ b/.github/workflows/reusable-docs.yml @@ -65,8 +65,8 @@ jobs: continue-on-error: true run: | set -Eeuo pipefail - # Build docs with the '-n' (nit-picky) option; write warnings to file - make -C Doc/ PYTHON=../python SPHINXOPTS="-q -n -W --keep-going -w sphinx-warnings.txt" html + # Build docs with the nit-picky option; write warnings to file + make -C Doc/ PYTHON=../python SPHINXOPTS="--quiet --nitpicky --fail-on-warning --keep-going --warning-file sphinx-warnings.txt" html - name: 'Check warnings' if: github.event_name == 'pull_request' run: | @@ -76,26 +76,6 @@ jobs: --fail-if-improved \ --fail-if-new-news-nit - # This build doesn't use problem matchers or check annotations - build_doc_oldest_supported_sphinx: - name: 'Docs (Oldest Sphinx)' - runs-on: ubuntu-latest - timeout-minutes: 60 - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - name: 'Set up Python' - uses: actions/setup-python@v5 - with: - python-version: '3.13' # known to work with Sphinx 7.2.6 - cache: 'pip' - cache-dependency-path: 'Doc/requirements-oldest-sphinx.txt' - - name: 'Install build dependencies' - run: make -C Doc/ venv REQUIREMENTS="requirements-oldest-sphinx.txt" - - name: 'Build HTML documentation' - run: make -C Doc/ SPHINXOPTS="-q" SPHINXERRORHANDLING="-W --keep-going" html - # Run "doctest" on HEAD as new syntax doesn't exist in the latest stable release doctest: name: 'Doctest' @@ -121,4 +101,4 @@ jobs: run: make -C Doc/ PYTHON=../python venv # Use "xvfb-run" since some doctest tests open GUI windows - name: 'Run documentation doctest' - run: xvfb-run make -C Doc/ PYTHON=../python SPHINXERRORHANDLING="-W --keep-going" doctest + run: xvfb-run make -C Doc/ PYTHON=../python SPHINXERRORHANDLING="--fail-on-warning --keep-going" doctest diff --git a/.github/workflows/reusable-macos.yml b/.github/workflows/reusable-macos.yml index cdbe05e09fb8e7..4e7aced94a321c 100644 --- a/.github/workflows/reusable-macos.yml +++ b/.github/workflows/reusable-macos.yml @@ -15,6 +15,9 @@ on: required: true type: string +env: + FORCE_COLOR: 1 + jobs: build_macos: name: build and test (${{ inputs.os }}) diff --git a/.github/workflows/reusable-tsan.yml b/.github/workflows/reusable-tsan.yml index b5144ca3e9efc4..1d2548565d50ef 100644 --- a/.github/workflows/reusable-tsan.yml +++ b/.github/workflows/reusable-tsan.yml @@ -6,26 +6,20 @@ on: config_hash: required: true type: string - options: - required: true - type: string - suppressions_path: - description: 'A repo relative path to the suppressions file' - required: true - type: string - tsan_logs_artifact_name: - description: 'Name of the TSAN logs artifact. Must be unique for each job.' 
- required: true - type: string + free-threading: + description: Whether to use free-threaded mode + required: false + type: boolean + default: false + +env: + FORCE_COLOR: 1 jobs: build_tsan_reusable: name: 'Thread sanitizer' runs-on: ubuntu-24.04 timeout-minutes: 60 - env: - OPTIONS: ${{ inputs.options }} - SUPPRESSIONS_PATH: ${{ inputs.suppressions_path }} steps: - uses: actions/checkout@v4 with: @@ -52,7 +46,11 @@ jobs: sudo sysctl -w vm.mmap_rnd_bits=28 - name: TSAN Option Setup run: | - echo "TSAN_OPTIONS=log_path=${GITHUB_WORKSPACE}/tsan_log suppressions=${GITHUB_WORKSPACE}/${SUPPRESSIONS_PATH} handle_segv=0" >> "$GITHUB_ENV" + echo "TSAN_OPTIONS=log_path=${GITHUB_WORKSPACE}/tsan_log suppressions=${GITHUB_WORKSPACE}/Tools/tsan/suppressions${{ + fromJSON(inputs.free-threading) + && '_free_threading' + || '' + }}.txt handle_segv=0" >> "$GITHUB_ENV" echo "CC=clang" >> "$GITHUB_ENV" echo "CXX=clang++" >> "$GITHUB_ENV" - name: Add ccache to PATH @@ -64,7 +62,12 @@ jobs: save: ${{ github.event_name == 'push' }} max-size: "200M" - name: Configure CPython - run: "${OPTIONS}" + run: >- + ./configure + --config-cache + --with-thread-sanitizer + --with-pydebug + ${{ fromJSON(inputs.free-threading) && '--disable-gil' || '' }} - name: Build CPython run: make -j4 - name: Display build info @@ -78,6 +81,11 @@ jobs: if: always() uses: actions/upload-artifact@v4 with: - name: ${{ inputs.tsan_logs_artifact_name }} + name: >- + tsan-logs-${{ + fromJSON(inputs.free-threading) + && 'free-threading' + || 'default' + }} path: tsan_log.* if-no-files-found: ignore diff --git a/.github/workflows/reusable-ubuntu.yml b/.github/workflows/reusable-ubuntu.yml index 46c542940c8483..aa8ba00f19d8ca 100644 --- a/.github/workflows/reusable-ubuntu.yml +++ b/.github/workflows/reusable-ubuntu.yml @@ -6,6 +6,11 @@ on: config_hash: required: true type: string + bolt-optimizations: + description: Whether to enable BOLT optimizations + required: false + type: boolean + default: false free-threading: description: Whether to use free-threaded mode required: false @@ -16,13 +21,15 @@ on: required: true type: string +env: + FORCE_COLOR: 1 + jobs: build_ubuntu_reusable: name: build and test (${{ inputs.os }}) timeout-minutes: 60 runs-on: ${{ inputs.os }} env: - FORCE_COLOR: 1 OPENSSL_VER: 3.0.15 PYTHONSTRICTEXTENSIONBUILD: 1 TERM: linux @@ -34,6 +41,12 @@ jobs: run: echo "::add-matcher::.github/problem-matchers/gcc.json" - name: Install dependencies run: sudo ./.github/workflows/posix-deps-apt.sh + - name: Install Clang and BOLT + if: ${{ fromJSON(inputs.bolt-optimizations) }} + run: | + sudo bash -c "$(wget -O - https://apt.llvm.org/llvm.sh)" ./llvm.sh 19 + sudo apt-get install bolt-19 + echo PATH="$(llvm-config-19 --bindir):$PATH" >> $GITHUB_ENV - name: Configure OpenSSL env vars run: | echo "MULTISSL_DIR=${GITHUB_WORKSPACE}/multissl" >> "$GITHUB_ENV" @@ -73,7 +86,10 @@ jobs: key: ${{ github.job }}-${{ runner.os }}-${{ env.IMAGE_VERSION }}-${{ inputs.config_hash }} - name: Configure CPython out-of-tree working-directory: ${{ env.CPYTHON_BUILDDIR }} + # `test_unpickle_module_race` writes to the source directory, which is + # read-only during builds — so we exclude it from profiling with BOLT. 
run: >- + PROFILE_TASK='-m test --pgo --ignore test_unpickle_module_race' ../cpython-ro-srcdir/configure --config-cache --with-pydebug @@ -81,6 +97,7 @@ jobs: --enable-safety --with-openssl="$OPENSSL_DIR" ${{ fromJSON(inputs.free-threading) && '--disable-gil' || '' }} + ${{ fromJSON(inputs.bolt-optimizations) && '--enable-bolt' || '' }} - name: Build CPython out-of-tree if: ${{ inputs.free-threading }} working-directory: ${{ env.CPYTHON_BUILDDIR }} diff --git a/.github/workflows/reusable-wasi.yml b/.github/workflows/reusable-wasi.yml index 4356d9c1c8795e..4456b83c8e0032 100644 --- a/.github/workflows/reusable-wasi.yml +++ b/.github/workflows/reusable-wasi.yml @@ -7,6 +7,9 @@ on: required: true type: string +env: + FORCE_COLOR: 1 + jobs: build_wasi_reusable: name: 'build and test' diff --git a/.github/workflows/reusable-windows-msi.yml b/.github/workflows/reusable-windows-msi.yml index a1c45d954247fb..bc0414d1bbcd8f 100644 --- a/.github/workflows/reusable-windows-msi.yml +++ b/.github/workflows/reusable-windows-msi.yml @@ -11,6 +11,9 @@ on: permissions: contents: read +env: + FORCE_COLOR: 1 + jobs: build: name: installer for ${{ inputs.arch }} diff --git a/.github/workflows/reusable-windows.yml b/.github/workflows/reusable-windows.yml index 459d2b29e5d42b..5485a0169130b0 100644 --- a/.github/workflows/reusable-windows.yml +++ b/.github/workflows/reusable-windows.yml @@ -18,12 +18,13 @@ on: default: false env: + FORCE_COLOR: 1 IncludeUwp: >- true jobs: build: - name: 'build and test (${{ inputs.arch }})' + name: ${{ inputs.arch == 'arm64' && 'build' || 'build and test' }} (${{ inputs.arch }}) runs-on: ${{ inputs.os }} timeout-minutes: 60 env: diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index af6accd89b5bd4..fb44c27704d455 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.8.2 + rev: v0.9.1 hooks: - id: ruff name: Run Ruff (lint) on Doc/ @@ -29,12 +29,10 @@ repos: - id: black name: Run Black on Tools/build/check_warnings.py files: ^Tools/build/check_warnings.py - language_version: python3.12 args: [--line-length=79] - id: black name: Run Black on Tools/jit/ files: ^Tools/jit/ - language_version: python3.12 - repo: https://github.com/pre-commit/pre-commit-hooks rev: v5.0.0 @@ -51,19 +49,19 @@ repos: types_or: [c, inc, python, rst] - repo: https://github.com/python-jsonschema/check-jsonschema - rev: 0.30.0 + rev: 0.31.0 hooks: - id: check-dependabot - id: check-github-workflows - id: check-readthedocs - repo: https://github.com/rhysd/actionlint - rev: v1.7.4 + rev: v1.7.7 hooks: - id: actionlint - repo: https://github.com/woodruffw/zizmor-pre-commit - rev: v0.8.0 + rev: v1.1.1 hooks: - id: zizmor diff --git a/Android/android-env.sh b/Android/android-env.sh index a0f23ef8c9fc52..bab4130c9e92d0 100644 --- a/Android/android-env.sh +++ b/Android/android-env.sh @@ -1,10 +1,10 @@ # This script must be sourced with the following variables already set: -: ${ANDROID_HOME:?} # Path to Android SDK -: ${HOST:?} # GNU target triplet +: "${ANDROID_HOME:?}" # Path to Android SDK +: "${HOST:?}" # GNU target triplet # You may also override the following: -: ${api_level:=24} # Minimum Android API level the build will run on -: ${PREFIX:-} # Path in which to find required libraries +: "${api_level:=24}" # Minimum Android API level the build will run on +: "${PREFIX:-}" # Path in which to find required libraries # Print all messages on stderr so they're visible when running within build-wheel. 
@@ -27,20 +27,20 @@ fail() { ndk_version=27.1.12297006 ndk=$ANDROID_HOME/ndk/$ndk_version -if ! [ -e $ndk ]; then +if ! [ -e "$ndk" ]; then log "Installing NDK - this may take several minutes" - yes | $ANDROID_HOME/cmdline-tools/latest/bin/sdkmanager "ndk;$ndk_version" + yes | "$ANDROID_HOME/cmdline-tools/latest/bin/sdkmanager" "ndk;$ndk_version" fi -if [ $HOST = "arm-linux-androideabi" ]; then +if [ "$HOST" = "arm-linux-androideabi" ]; then clang_triplet=armv7a-linux-androideabi else - clang_triplet=$HOST + clang_triplet="$HOST" fi # These variables are based on BuildSystemMaintainers.md above, and # $ndk/build/cmake/android.toolchain.cmake. -toolchain=$(echo $ndk/toolchains/llvm/prebuilt/*) +toolchain=$(echo "$ndk"/toolchains/llvm/prebuilt/*) export AR="$toolchain/bin/llvm-ar" export AS="$toolchain/bin/llvm-as" export CC="$toolchain/bin/${clang_triplet}${api_level}-clang" @@ -72,12 +72,12 @@ LDFLAGS="$LDFLAGS -lm" # -mstackrealign is included where necessary in the clang launcher scripts which are # pointed to by $CC, so we don't need to include it here. -if [ $HOST = "arm-linux-androideabi" ]; then +if [ "$HOST" = "arm-linux-androideabi" ]; then CFLAGS="$CFLAGS -march=armv7-a -mthumb" fi if [ -n "${PREFIX:-}" ]; then - abs_prefix=$(realpath $PREFIX) + abs_prefix="$(realpath "$PREFIX")" CFLAGS="$CFLAGS -I$abs_prefix/include" LDFLAGS="$LDFLAGS -L$abs_prefix/lib" @@ -87,11 +87,13 @@ fi # When compiling C++, some build systems will combine CFLAGS and CXXFLAGS, and some will # use CXXFLAGS alone. -export CXXFLAGS=$CFLAGS +export CXXFLAGS="$CFLAGS" # Use the same variable name as conda-build -if [ $(uname) = "Darwin" ]; then - export CPU_COUNT=$(sysctl -n hw.ncpu) +if [ "$(uname)" = "Darwin" ]; then + CPU_COUNT="$(sysctl -n hw.ncpu)" + export CPU_COUNT else - export CPU_COUNT=$(nproc) + CPU_COUNT="$(nproc)" + export CPU_COUNT fi diff --git a/Doc/Makefile b/Doc/Makefile index 4a704ad58b33d3..b8896da4a91869 100644 --- a/Doc/Makefile +++ b/Doc/Makefile @@ -14,15 +14,15 @@ PAPER = SOURCES = DISTVERSION = $(shell $(PYTHON) tools/extensions/patchlevel.py) REQUIREMENTS = requirements.txt -SPHINXERRORHANDLING = -W +SPHINXERRORHANDLING = --fail-on-warning # Internal variables. -PAPEROPT_a4 = -D latex_elements.papersize=a4paper -PAPEROPT_letter = -D latex_elements.papersize=letterpaper +PAPEROPT_a4 = --define latex_elements.papersize=a4paper +PAPEROPT_letter = --define latex_elements.papersize=letterpaper -ALLSPHINXOPTS = -b $(BUILDER) \ - -d build/doctrees \ - -j $(JOBS) \ +ALLSPHINXOPTS = --builder $(BUILDER) \ + --doctree-dir build/doctrees \ + --jobs $(JOBS) \ $(PAPEROPT_$(PAPER)) \ $(SPHINXOPTS) $(SPHINXERRORHANDLING) \ . build/$(BUILDER) $(SOURCES) @@ -144,7 +144,7 @@ pydoc-topics: build .PHONY: gettext gettext: BUILDER = gettext -gettext: override SPHINXOPTS := -d build/doctrees-gettext $(SPHINXOPTS) +gettext: override SPHINXOPTS := --doctree-dir build/doctrees-gettext $(SPHINXOPTS) gettext: build .PHONY: htmlview @@ -172,7 +172,7 @@ venv: else \ echo "Creating venv in $(VENVDIR)"; \ if $(UV) --version >/dev/null 2>&1; then \ - $(UV) venv $(VENVDIR); \ + $(UV) venv --python=$(PYTHON) $(VENVDIR); \ VIRTUAL_ENV=$(VENVDIR) $(UV) pip install -r $(REQUIREMENTS); \ else \ $(PYTHON) -m venv $(VENVDIR); \ @@ -300,20 +300,20 @@ serve: # By default, Sphinx only rebuilds pages where the page content has changed. 
# This means it doesn't always pick up changes to preferred link targets, etc # To ensure such changes are picked up, we build the published docs with -# `-E` (to ignore the cached environment) and `-a` (to ignore already existing -# output files) +# ``--fresh-env`` (to ignore the cached environment) and ``--write-all`` +# (to ignore already existing output files) # for development releases: always build .PHONY: autobuild-dev autobuild-dev: DISTVERSION = $(shell $(PYTHON) tools/extensions/patchlevel.py --short) autobuild-dev: - $(MAKE) dist-no-html SPHINXOPTS='$(SPHINXOPTS) -Ea -A daily=1' DISTVERSION=$(DISTVERSION) + $(MAKE) dist-no-html SPHINXOPTS='$(SPHINXOPTS) --fresh-env --write-all --html-define daily=1' DISTVERSION=$(DISTVERSION) # for HTML-only rebuilds .PHONY: autobuild-dev-html autobuild-dev-html: DISTVERSION = $(shell $(PYTHON) tools/extensions/patchlevel.py --short) autobuild-dev-html: - $(MAKE) dist-html SPHINXOPTS='$(SPHINXOPTS) -Ea -A daily=1' DISTVERSION=$(DISTVERSION) + $(MAKE) dist-html SPHINXOPTS='$(SPHINXOPTS) --fresh-env --write-all --html-define daily=1' DISTVERSION=$(DISTVERSION) # for stable releases: only build if not in pre-release stage (alpha, beta) # release candidate downloads are okay, since the stable tree can be in that stage diff --git a/Doc/c-api/apiabiversion.rst b/Doc/c-api/apiabiversion.rst index f6c8284daeacb0..96050f59bd5250 100644 --- a/Doc/c-api/apiabiversion.rst +++ b/Doc/c-api/apiabiversion.rst @@ -6,9 +6,13 @@ API and ABI Versioning *********************** + +Build-time version constants +---------------------------- + CPython exposes its version number in the following macros. -Note that these correspond to the version code is **built** with, -not necessarily the version used at **run time**. +Note that these correspond to the version code is **built** with. +See :c:var:`Py_Version` for the version used at **run time**. See :ref:`stable` for a discussion of API and ABI stability across versions. @@ -37,37 +41,83 @@ See :ref:`stable` for a discussion of API and ABI stability across versions. .. c:macro:: PY_VERSION_HEX The Python version number encoded in a single integer. + See :c:func:`Py_PACK_FULL_VERSION` for the encoding details. - The underlying version information can be found by treating it as a 32 bit - number in the following manner: - - +-------+-------------------------+-------------------------+--------------------------+ - | Bytes | Bits (big endian order) | Meaning | Value for ``3.4.1a2`` | - +=======+=========================+=========================+==========================+ - | 1 | 1-8 | ``PY_MAJOR_VERSION`` | ``0x03`` | - +-------+-------------------------+-------------------------+--------------------------+ - | 2 | 9-16 | ``PY_MINOR_VERSION`` | ``0x04`` | - +-------+-------------------------+-------------------------+--------------------------+ - | 3 | 17-24 | ``PY_MICRO_VERSION`` | ``0x01`` | - +-------+-------------------------+-------------------------+--------------------------+ - | 4 | 25-28 | ``PY_RELEASE_LEVEL`` | ``0xA`` | - + +-------------------------+-------------------------+--------------------------+ - | | 29-32 | ``PY_RELEASE_SERIAL`` | ``0x2`` | - +-------+-------------------------+-------------------------+--------------------------+ + Use this for numeric comparisons, for example, + ``#if PY_VERSION_HEX >= ...``. - Thus ``3.4.1a2`` is hexversion ``0x030401a2`` and ``3.10.0`` is - hexversion ``0x030a00f0``. - Use this for numeric comparisons, e.g. ``#if PY_VERSION_HEX >= ...``. 
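For instance, a purely illustrative compile-time guard against the packed value of 3.10.0 (``0x030a00f0``, as encoded above) might look like this sketch; the ``HAVE_PY310_BUILD_HEADERS`` name is made up for the example:

.. code-block:: c

   #include <Python.h>

   /* Compile-time check: 0x030a00f0 packs major 3, minor 10, micro 0,
      release level 0xF ("final") and serial 0, i.e. Python 3.10.0. */
   #if PY_VERSION_HEX >= 0x030a00f0
   #  define HAVE_PY310_BUILD_HEADERS 1
   #else
   #  define HAVE_PY310_BUILD_HEADERS 0
   #endif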
- - This version is also available via the symbol :c:var:`Py_Version`. +Run-time version +---------------- .. c:var:: const unsigned long Py_Version - The Python runtime version number encoded in a single constant integer, with - the same format as the :c:macro:`PY_VERSION_HEX` macro. + The Python runtime version number encoded in a single constant integer. + See :c:func:`Py_PACK_FULL_VERSION` for the encoding details. This contains the Python version used at run time. + Use this for numeric comparisons, for example, ``if (Py_Version >= ...)``. + .. versionadded:: 3.11 -All the given macros are defined in :source:`Include/patchlevel.h`. + +Bit-packing macros +------------------ + +.. c:function:: uint32_t Py_PACK_FULL_VERSION(int major, int minor, int micro, int release_level, int release_serial) + + Return the given version, encoded as a single 32-bit integer with + the following structure: + + +------------------+-------+----------------+-----------+--------------------------+ + | | No. | | | Example values | + | | of | | +-------------+------------+ + | Argument | bits | Bit mask | Bit shift | ``3.4.1a2`` | ``3.10.0`` | + +==================+=======+================+===========+=============+============+ + | *major* | 8 | ``0xFF000000`` | 24 | ``0x03`` | ``0x03`` | + +------------------+-------+----------------+-----------+-------------+------------+ + | *minor* | 8 | ``0x00FF0000`` | 16 | ``0x04`` | ``0x0A`` | + +------------------+-------+----------------+-----------+-------------+------------+ + | *micro* | 8 | ``0x0000FF00`` | 8 | ``0x01`` | ``0x00`` | + +------------------+-------+----------------+-----------+-------------+------------+ + | *release_level* | 4 | ``0x000000F0`` | 4 | ``0xA`` | ``0xF`` | + +------------------+-------+----------------+-----------+-------------+------------+ + | *release_serial* | 4 | ``0x0000000F`` | 0 | ``0x2`` | ``0x0`` | + +------------------+-------+----------------+-----------+-------------+------------+ + + For example: + + +-------------+------------------------------------+-----------------+ + | Version | ``Py_PACK_FULL_VERSION`` arguments | Encoded version | + +=============+====================================+=================+ + | ``3.4.1a2`` | ``(3, 4, 1, 0xA, 2)`` | ``0x030401a2`` | + +-------------+------------------------------------+-----------------+ + | ``3.10.0`` | ``(3, 10, 0, 0xF, 0)`` | ``0x030a00f0`` | + +-------------+------------------------------------+-----------------+ + + Out-of range bits in the arguments are ignored. + That is, the macro can be defined as: + + .. code-block:: c + + #ifndef Py_PACK_FULL_VERSION + #define Py_PACK_FULL_VERSION(X, Y, Z, LEVEL, SERIAL) ( \ + (((X) & 0xff) << 24) | \ + (((Y) & 0xff) << 16) | \ + (((Z) & 0xff) << 8) | \ + (((LEVEL) & 0xf) << 4) | \ + (((SERIAL) & 0xf) << 0)) + #endif + + ``Py_PACK_FULL_VERSION`` is primarily a macro, intended for use in + ``#if`` directives, but it is also available as an exported function. + + .. versionadded:: 3.14 + +.. c:function:: uint32_t Py_PACK_VERSION(int major, int minor) + + Equivalent to ``Py_PACK_FULL_VERSION(major, minor, 0, 0, 0)``. + The result does not correspond to any Python release, but is useful + in numeric comparisons. + + .. versionadded:: 3.14 diff --git a/Doc/c-api/arg.rst b/Doc/c-api/arg.rst index 41c0366d205086..209056ef2f8bce 100644 --- a/Doc/c-api/arg.rst +++ b/Doc/c-api/arg.rst @@ -319,7 +319,7 @@ Other objects .. 
_o_ampersand: -``O&`` (object) [*converter*, *anything*] +``O&`` (object) [*converter*, *address*] Convert a Python object to a C variable through a *converter* function. This takes two arguments: the first is a function, the second is the address of a C variable (of arbitrary type), converted to :c:expr:`void *`. The *converter* @@ -333,14 +333,20 @@ Other objects the conversion has failed. When the conversion fails, the *converter* function should raise an exception and leave the content of *address* unmodified. - If the *converter* returns ``Py_CLEANUP_SUPPORTED``, it may get called a + .. c:macro:: Py_CLEANUP_SUPPORTED + :no-typesetting: + + If the *converter* returns :c:macro:`!Py_CLEANUP_SUPPORTED`, it may get called a second time if the argument parsing eventually fails, giving the converter a chance to release any memory that it had already allocated. In this second call, the *object* parameter will be ``NULL``; *address* will have the same value as in the original call. + Examples of converters: :c:func:`PyUnicode_FSConverter` and + :c:func:`PyUnicode_FSDecoder`. + .. versionchanged:: 3.1 - ``Py_CLEANUP_SUPPORTED`` was added. + :c:macro:`!Py_CLEANUP_SUPPORTED` was added. ``p`` (:class:`bool`) [int] Tests the value passed in for truth (a boolean **p**\ redicate) and converts diff --git a/Doc/c-api/import.rst b/Doc/c-api/import.rst index 6e48644c8fef8b..1cab3ce3061ec9 100644 --- a/Doc/c-api/import.rst +++ b/Doc/c-api/import.rst @@ -325,3 +325,24 @@ Importing Modules If Python is initialized multiple times, :c:func:`PyImport_AppendInittab` or :c:func:`PyImport_ExtendInittab` must be called before each Python initialization. + + +.. c:function:: PyObject* PyImport_ImportModuleAttr(PyObject *mod_name, PyObject *attr_name) + + Import the module *mod_name* and get its attribute *attr_name*. + + Names must be Python :class:`str` objects. + + Helper function combining :c:func:`PyImport_Import` and + :c:func:`PyObject_GetAttr`. For example, it can raise :exc:`ImportError` if + the module is not found, and :exc:`AttributeError` if the attribute doesn't + exist. + + .. versionadded:: 3.14 + +.. c:function:: PyObject* PyImport_ImportModuleAttrString(const char *mod_name, const char *attr_name) + + Similar to :c:func:`PyImport_ImportModuleAttr`, but names are UTF-8 encoded + strings instead of Python :class:`str` objects. + + .. versionadded:: 3.14 diff --git a/Doc/c-api/init.rst b/Doc/c-api/init.rst index dd63dd013e32dc..8e3be97dfeefd1 100644 --- a/Doc/c-api/init.rst +++ b/Doc/c-api/init.rst @@ -109,7 +109,7 @@ to 1 and ``-bb`` sets :c:data:`Py_BytesWarningFlag` to 2. Set by the :option:`-b` option. - .. deprecated-removed:: 3.12 3.14 + .. deprecated-removed:: 3.12 3.15 .. c:var:: int Py_DebugFlag @@ -123,7 +123,7 @@ to 1 and ``-bb`` sets :c:data:`Py_BytesWarningFlag` to 2. Set by the :option:`-d` option and the :envvar:`PYTHONDEBUG` environment variable. - .. deprecated-removed:: 3.12 3.14 + .. deprecated-removed:: 3.12 3.15 .. c:var:: int Py_DontWriteBytecodeFlag @@ -137,7 +137,7 @@ to 1 and ``-bb`` sets :c:data:`Py_BytesWarningFlag` to 2. Set by the :option:`-B` option and the :envvar:`PYTHONDONTWRITEBYTECODE` environment variable. - .. deprecated-removed:: 3.12 3.14 + .. deprecated-removed:: 3.12 3.15 .. c:var:: int Py_FrozenFlag @@ -150,7 +150,7 @@ to 1 and ``-bb`` sets :c:data:`Py_BytesWarningFlag` to 2. Private flag used by ``_freeze_module`` and ``frozenmain`` programs. - .. deprecated-removed:: 3.12 3.14 + .. deprecated-removed:: 3.12 3.15 .. 
c:var:: int Py_HashRandomizationFlag @@ -165,7 +165,7 @@ to 1 and ``-bb`` sets :c:data:`Py_BytesWarningFlag` to 2. If the flag is non-zero, read the :envvar:`PYTHONHASHSEED` environment variable to initialize the secret hash seed. - .. deprecated-removed:: 3.12 3.14 + .. deprecated-removed:: 3.12 3.15 .. c:var:: int Py_IgnoreEnvironmentFlag @@ -178,7 +178,7 @@ to 1 and ``-bb`` sets :c:data:`Py_BytesWarningFlag` to 2. Set by the :option:`-E` and :option:`-I` options. - .. deprecated-removed:: 3.12 3.14 + .. deprecated-removed:: 3.12 3.15 .. c:var:: int Py_InspectFlag @@ -193,7 +193,7 @@ to 1 and ``-bb`` sets :c:data:`Py_BytesWarningFlag` to 2. Set by the :option:`-i` option and the :envvar:`PYTHONINSPECT` environment variable. - .. deprecated-removed:: 3.12 3.14 + .. deprecated-removed:: 3.12 3.15 .. c:var:: int Py_InteractiveFlag @@ -218,7 +218,7 @@ to 1 and ``-bb`` sets :c:data:`Py_BytesWarningFlag` to 2. .. versionadded:: 3.4 - .. deprecated-removed:: 3.12 3.14 + .. deprecated-removed:: 3.12 3.15 .. c:var:: int Py_LegacyWindowsFSEncodingFlag @@ -237,7 +237,7 @@ to 1 and ``-bb`` sets :c:data:`Py_BytesWarningFlag` to 2. .. availability:: Windows. - .. deprecated-removed:: 3.12 3.14 + .. deprecated-removed:: 3.12 3.15 .. c:var:: int Py_LegacyWindowsStdioFlag @@ -255,7 +255,7 @@ to 1 and ``-bb`` sets :c:data:`Py_BytesWarningFlag` to 2. .. availability:: Windows. - .. deprecated-removed:: 3.12 3.14 + .. deprecated-removed:: 3.12 3.15 .. c:var:: int Py_NoSiteFlag @@ -270,7 +270,7 @@ to 1 and ``-bb`` sets :c:data:`Py_BytesWarningFlag` to 2. Set by the :option:`-S` option. - .. deprecated-removed:: 3.12 3.14 + .. deprecated-removed:: 3.12 3.15 .. c:var:: int Py_NoUserSiteDirectory @@ -284,7 +284,7 @@ to 1 and ``-bb`` sets :c:data:`Py_BytesWarningFlag` to 2. Set by the :option:`-s` and :option:`-I` options, and the :envvar:`PYTHONNOUSERSITE` environment variable. - .. deprecated-removed:: 3.12 3.14 + .. deprecated-removed:: 3.12 3.15 .. c:var:: int Py_OptimizeFlag @@ -295,7 +295,7 @@ to 1 and ``-bb`` sets :c:data:`Py_BytesWarningFlag` to 2. Set by the :option:`-O` option and the :envvar:`PYTHONOPTIMIZE` environment variable. - .. deprecated-removed:: 3.12 3.14 + .. deprecated-removed:: 3.12 3.15 .. c:var:: int Py_QuietFlag @@ -309,7 +309,7 @@ to 1 and ``-bb`` sets :c:data:`Py_BytesWarningFlag` to 2. .. versionadded:: 3.2 - .. deprecated-removed:: 3.12 3.14 + .. deprecated-removed:: 3.12 3.15 .. c:var:: int Py_UnbufferedStdioFlag @@ -322,7 +322,7 @@ to 1 and ``-bb`` sets :c:data:`Py_BytesWarningFlag` to 2. Set by the :option:`-u` option and the :envvar:`PYTHONUNBUFFERED` environment variable. - .. deprecated-removed:: 3.12 3.14 + .. deprecated-removed:: 3.12 3.15 .. c:var:: int Py_VerboseFlag @@ -338,7 +338,7 @@ to 1 and ``-bb`` sets :c:data:`Py_BytesWarningFlag` to 2. Set by the :option:`-v` option and the :envvar:`PYTHONVERBOSE` environment variable. - .. deprecated-removed:: 3.12 3.14 + .. deprecated-removed:: 3.12 3.15 Initializing and finalizing the interpreter @@ -606,7 +606,7 @@ Process-wide parameters Use :c:func:`Py_DecodeLocale` to decode a bytes string to get a :c:expr:`wchar_*` string. - .. deprecated:: 3.11 + .. deprecated-removed:: 3.11 3.15 .. c:function:: wchar_t* Py_GetProgramName() @@ -622,7 +622,8 @@ Process-wide parameters It now returns ``NULL`` if called before :c:func:`Py_Initialize`. .. deprecated-removed:: 3.13 3.15 - Get :data:`sys.executable` instead. + Use :c:func:`PyConfig_Get("executable") ` + (:data:`sys.executable`) instead. .. 
c:function:: wchar_t* Py_GetPrefix() @@ -644,8 +645,10 @@ Process-wide parameters It now returns ``NULL`` if called before :c:func:`Py_Initialize`. .. deprecated-removed:: 3.13 3.15 - Get :data:`sys.base_prefix` instead, or :data:`sys.prefix` if - :ref:`virtual environments ` need to be handled. + Use :c:func:`PyConfig_Get("base_prefix") ` + (:data:`sys.base_prefix`) instead. Use :c:func:`PyConfig_Get("prefix") + ` (:data:`sys.prefix`) if :ref:`virtual environments + ` need to be handled. .. c:function:: wchar_t* Py_GetExecPrefix() @@ -690,9 +693,11 @@ Process-wide parameters It now returns ``NULL`` if called before :c:func:`Py_Initialize`. .. deprecated-removed:: 3.13 3.15 - Get :data:`sys.base_exec_prefix` instead, or :data:`sys.exec_prefix` if - :ref:`virtual environments ` need to be handled. - + Use :c:func:`PyConfig_Get("base_exec_prefix") ` + (:data:`sys.base_exec_prefix`) instead. Use + :c:func:`PyConfig_Get("exec_prefix") ` + (:data:`sys.exec_prefix`) if :ref:`virtual environments ` need + to be handled. .. c:function:: wchar_t* Py_GetProgramFullPath() @@ -712,7 +717,8 @@ Process-wide parameters It now returns ``NULL`` if called before :c:func:`Py_Initialize`. .. deprecated-removed:: 3.13 3.15 - Get :data:`sys.executable` instead. + Use :c:func:`PyConfig_Get("executable") ` + (:data:`sys.executable`) instead. .. c:function:: wchar_t* Py_GetPath() @@ -740,8 +746,8 @@ Process-wide parameters It now returns ``NULL`` if called before :c:func:`Py_Initialize`. .. deprecated-removed:: 3.13 3.15 - Get :data:`sys.path` instead. - + Use :c:func:`PyConfig_Get("module_search_paths") ` + (:data:`sys.path`) instead. .. c:function:: const char* Py_GetVersion() @@ -868,7 +874,7 @@ Process-wide parameters .. XXX impl. doesn't seem consistent in allowing ``0``/``NULL`` for the params; check w/ Guido. - .. deprecated:: 3.11 + .. deprecated-removed:: 3.11 3.15 .. c:function:: void PySys_SetArgv(int argc, wchar_t **argv) @@ -889,7 +895,7 @@ Process-wide parameters .. versionchanged:: 3.4 The *updatepath* value depends on :option:`-I`. - .. deprecated:: 3.11 + .. deprecated-removed:: 3.11 3.15 .. c:function:: void Py_SetPythonHome(const wchar_t *home) @@ -910,7 +916,7 @@ Process-wide parameters Use :c:func:`Py_DecodeLocale` to decode a bytes string to get a :c:expr:`wchar_*` string. - .. deprecated:: 3.11 + .. deprecated-removed:: 3.11 3.15 .. c:function:: wchar_t* Py_GetPythonHome() @@ -926,8 +932,8 @@ Process-wide parameters It now returns ``NULL`` if called before :c:func:`Py_Initialize`. .. deprecated-removed:: 3.13 3.15 - Get :c:member:`PyConfig.home` or :envvar:`PYTHONHOME` environment - variable instead. + Use :c:func:`PyConfig_Get("home") ` or the + :envvar:`PYTHONHOME` environment variable instead. .. _threads: @@ -1492,6 +1498,17 @@ All of the following functions must be called after :c:func:`Py_Initialize`. .. versionadded:: 3.8 + +.. c:function:: PyObject* PyUnstable_InterpreterState_GetMainModule(PyInterpreterState *interp) + + Return a :term:`strong reference` to the ``__main__`` `module object `_ + for the given interpreter. + + The caller must hold the GIL. + + .. versionadded:: 3.13 + + .. c:type:: PyObject* (*_PyFrameEvalFunction)(PyThreadState *tstate, _PyInterpreterFrame *frame, int throwflag) Type of a frame evaluation function. 
diff --git a/Doc/c-api/init_config.rst b/Doc/c-api/init_config.rst index 6b33d93a9f2af9..b791d3cdc5d95c 100644 --- a/Doc/c-api/init_config.rst +++ b/Doc/c-api/init_config.rst @@ -6,28 +6,15 @@ Python Initialization Configuration *********************************** -.. _pyconfig_api: - -PyConfig C API -============== -.. versionadded:: 3.8 - -Python can be initialized with :c:func:`Py_InitializeFromConfig` and the -:c:type:`PyConfig` structure. It can be preinitialized with -:c:func:`Py_PreInitialize` and the :c:type:`PyPreConfig` structure. +.. _pyinitconfig_api: -There are two kinds of configuration: +PyInitConfig C API +================== -* The :ref:`Python Configuration ` can be used to build a - customized Python which behaves as the regular Python. For example, - environment variables and command line arguments are used to configure - Python. +.. versionadded:: 3.14 -* The :ref:`Isolated Configuration ` can be used to embed - Python into an application. It isolates Python from the system. For example, - environment variables are ignored, the LC_CTYPE locale is left unchanged and - no signal handler is registered. +Python can be initialized with :c:func:`Py_InitializeFromInitConfig`. The :c:func:`Py_RunMain` function can be used to write a customized Python program. @@ -35,2000 +22,2267 @@ program. See also :ref:`Initialization, Finalization, and Threads `. .. seealso:: - :pep:`587` "Python Initialization Configuration". + :pep:`741` "Python Configuration C API". Example ------- -Example of customized Python always running in isolated mode:: - - int main(int argc, char **argv) - { - PyStatus status; +Example of customized Python always running with the :ref:`Python Development +Mode ` enabled; return ``-1`` on error: - PyConfig config; - PyConfig_InitPythonConfig(&config); - config.isolated = 1; +.. code-block:: c - /* Decode command line arguments. - Implicitly preinitialize Python (in isolated mode). */ - status = PyConfig_SetBytesArgv(&config, argc, argv); - if (PyStatus_Exception(status)) { - goto exception; + int init_python(void) + { + PyInitConfig *config = PyInitConfig_Create(); + if (config == NULL) { + printf("PYTHON INIT ERROR: memory allocation failed\n"); + return -1; } - status = Py_InitializeFromConfig(&config); - if (PyStatus_Exception(status)) { - goto exception; + // Enable the Python Development Mode + if (PyInitConfig_SetInt(config, "dev_mode", 1) < 0) { + goto error; } - PyConfig_Clear(&config); - return Py_RunMain(); + // Initialize Python with the configuration + if (Py_InitializeFromInitConfig(config) < 0) { + goto error; + } + PyInitConfig_Free(config); + return 0; - exception: - PyConfig_Clear(&config); - if (PyStatus_IsExit(status)) { - return status.exitcode; + error: + { + // Display the error message. + // + // This uncommon braces style is used, because you cannot make + // goto targets point to variable declarations. + const char *err_msg; + (void)PyInitConfig_GetError(config, &err_msg); + printf("PYTHON INIT ERROR: %s\n", err_msg); + PyInitConfig_Free(config); + return -1; } - /* Display the error message and exit the process with - non-zero exit code */ - Py_ExitStatusException(status); } +Create Config +------------- -PyWideStringList ----------------- +.. c:struct:: PyInitConfig -.. c:type:: PyWideStringList + Opaque structure to configure the Python initialization. - List of ``wchar_t*`` strings. - If *length* is non-zero, *items* must be non-``NULL`` and all strings must be - non-``NULL``. +.. 
c:function:: PyInitConfig* PyInitConfig_Create(void) - .. c:namespace:: NULL + Create a new initialization configuration using :ref:`Isolated Configuration + ` default values. - Methods: + It must be freed by :c:func:`PyInitConfig_Free`. - .. c:function:: PyStatus PyWideStringList_Append(PyWideStringList *list, const wchar_t *item) + Return ``NULL`` on memory allocation failure. - Append *item* to *list*. - Python must be preinitialized to call this function. +.. c:function:: void PyInitConfig_Free(PyInitConfig *config) - .. c:function:: PyStatus PyWideStringList_Insert(PyWideStringList *list, Py_ssize_t index, const wchar_t *item) + Free memory of the initialization configuration *config*. - Insert *item* into *list* at *index*. + If *config* is ``NULL``, no operation is performed. - If *index* is greater than or equal to *list* length, append *item* to - *list*. - *index* must be greater than or equal to ``0``. +Error Handling +-------------- - Python must be preinitialized to call this function. +.. c:function:: int PyInitConfig_GetError(PyInitConfig* config, const char **err_msg) - .. c:namespace:: PyWideStringList + Get the *config* error message. - Structure fields: + * Set *\*err_msg* and return ``1`` if an error is set. + * Set *\*err_msg* to ``NULL`` and return ``0`` otherwise. - .. c:member:: Py_ssize_t length + An error message is an UTF-8 encoded string. - List length. + If *config* has an exit code, format the exit code as an error + message. - .. c:member:: wchar_t** items + The error message remains valid until another ``PyInitConfig`` + function is called with *config*. The caller doesn't have to free the + error message. - List items. -PyStatus --------- +.. c:function:: int PyInitConfig_GetExitCode(PyInitConfig* config, int *exitcode) -.. c:type:: PyStatus + Get the *config* exit code. - Structure to store an initialization function status: success, error - or exit. + * Set *\*exitcode* and return ``1`` if *config* has an exit code set. + * Return ``0`` if *config* has no exit code set. - For an error, it can store the C function name which created the error. + Only the ``Py_InitializeFromInitConfig()`` function can set an exit + code if the ``parse_argv`` option is non-zero. - Structure fields: + An exit code can be set when parsing the command line failed (exit + code ``2``) or when a command line option asks to display the command + line help (exit code ``0``). - .. c:member:: int exitcode - Exit code. Argument passed to ``exit()``. +Get Options +----------- - .. c:member:: const char *err_msg +The configuration option *name* parameter must be a non-NULL null-terminated +UTF-8 encoded string. See :ref:`Configuration Options `. - Error message. +.. c:function:: int PyInitConfig_HasOption(PyInitConfig *config, const char *name) - .. c:member:: const char *func + Test if the configuration has an option called *name*. - Name of the function which created an error, can be ``NULL``. + Return ``1`` if the option exists, or return ``0`` otherwise. - .. c:namespace:: NULL - Functions to create a status: +.. c:function:: int PyInitConfig_GetInt(PyInitConfig *config, const char *name, int64_t *value) - .. c:function:: PyStatus PyStatus_Ok(void) + Get an integer configuration option. - Success. + * Set *\*value*, and return ``0`` on success. + * Set an error in *config* and return ``-1`` on error. - .. c:function:: PyStatus PyStatus_Error(const char *err_msg) - Initialization error with a message. +.. 
c:function:: int PyInitConfig_GetStr(PyInitConfig *config, const char *name, char **value) - *err_msg* must not be ``NULL``. + Get a string configuration option as a null-terminated UTF-8 + encoded string. - .. c:function:: PyStatus PyStatus_NoMemory(void) + * Set *\*value*, and return ``0`` on success. + * Set an error in *config* and return ``-1`` on error. - Memory allocation failure (out of memory). + *\*value* can be set to ``NULL`` if the option is an optional string and the + option is unset. - .. c:function:: PyStatus PyStatus_Exit(int exitcode) + On success, the string must be released with ``free(value)`` if it's not + ``NULL``. - Exit Python with the specified exit code. - Functions to handle a status: +.. c:function:: int PyInitConfig_GetStrList(PyInitConfig *config, const char *name, size_t *length, char ***items) - .. c:function:: int PyStatus_Exception(PyStatus status) + Get a string list configuration option as an array of + null-terminated UTF-8 encoded strings. - Is the status an error or an exit? If true, the exception must be - handled; by calling :c:func:`Py_ExitStatusException` for example. + * Set *\*length* and *\*value*, and return ``0`` on success. + * Set an error in *config* and return ``-1`` on error. - .. c:function:: int PyStatus_IsError(PyStatus status) + On success, the string list must be released with + ``PyInitConfig_FreeStrList(length, items)``. - Is the result an error? - .. c:function:: int PyStatus_IsExit(PyStatus status) +.. c:function:: void PyInitConfig_FreeStrList(size_t length, char **items) - Is the result an exit? + Free memory of a string list created by + ``PyInitConfig_GetStrList()``. - .. c:function:: void Py_ExitStatusException(PyStatus status) - Call ``exit(exitcode)`` if *status* is an exit. Print the error - message and exit with a non-zero exit code if *status* is an error. Must - only be called if ``PyStatus_Exception(status)`` is non-zero. +Set Options +----------- -.. note:: - Internally, Python uses macros which set ``PyStatus.func``, - whereas functions to create a status set ``func`` to ``NULL``. +The configuration option *name* parameter must be a non-NULL null-terminated +UTF-8 encoded string. See :ref:`Configuration Options `. -Example:: +Some configuration options have side effects on other options. This logic is +only implemented when ``Py_InitializeFromInitConfig()`` is called, not by the +"Set" functions below. For example, setting ``dev_mode`` to ``1`` does not set +``faulthandler`` to ``1``. - PyStatus alloc(void **ptr, size_t size) - { - *ptr = PyMem_RawMalloc(size); - if (*ptr == NULL) { - return PyStatus_NoMemory(); - } - return PyStatus_Ok(); - } +.. c:function:: int PyInitConfig_SetInt(PyInitConfig *config, const char *name, int64_t value) - int main(int argc, char **argv) - { - void *ptr; - PyStatus status = alloc(&ptr, 16); - if (PyStatus_Exception(status)) { - Py_ExitStatusException(status); - } - PyMem_Free(ptr); - return 0; - } + Set an integer configuration option. + * Return ``0`` on success. + * Set an error in *config* and return ``-1`` on error. -PyPreConfig ------------ -.. c:type:: PyPreConfig +.. c:function:: int PyInitConfig_SetStr(PyInitConfig *config, const char *name, const char *value) - Structure used to preinitialize Python. + Set a string configuration option from a null-terminated UTF-8 + encoded string. The string is copied. - .. c:namespace:: NULL + * Return ``0`` on success. + * Set an error in *config* and return ``-1`` on error. - Function to initialize a preconfiguration: - .. 
c:function:: void PyPreConfig_InitPythonConfig(PyPreConfig *preconfig) +.. c:function:: int PyInitConfig_SetStrList(PyInitConfig *config, const char *name, size_t length, char * const *items) - Initialize the preconfiguration with :ref:`Python Configuration - `. + Set a string list configuration option from an array of + null-terminated UTF-8 encoded strings. The string list is copied. - .. c:function:: void PyPreConfig_InitIsolatedConfig(PyPreConfig *preconfig) + * Return ``0`` on success. + * Set an error in *config* and return ``-1`` on error. - Initialize the preconfiguration with :ref:`Isolated Configuration - `. - .. c:namespace:: PyPreConfig +Module +------ - Structure fields: +.. c:function:: int PyInitConfig_AddModule(PyInitConfig *config, const char *name, PyObject* (*initfunc)(void)) - .. c:member:: int allocator + Add a built-in extension module to the table of built-in modules. - Name of the Python memory allocators: + The new module can be imported by the name *name*, and uses the function + *initfunc* as the initialization function called on the first attempted + import. - * ``PYMEM_ALLOCATOR_NOT_SET`` (``0``): don't change memory allocators - (use defaults). - * ``PYMEM_ALLOCATOR_DEFAULT`` (``1``): :ref:`default memory allocators - `. - * ``PYMEM_ALLOCATOR_DEBUG`` (``2``): :ref:`default memory allocators - ` with :ref:`debug hooks - `. - * ``PYMEM_ALLOCATOR_MALLOC`` (``3``): use ``malloc()`` of the C library. - * ``PYMEM_ALLOCATOR_MALLOC_DEBUG`` (``4``): force usage of - ``malloc()`` with :ref:`debug hooks `. - * ``PYMEM_ALLOCATOR_PYMALLOC`` (``5``): :ref:`Python pymalloc memory - allocator `. - * ``PYMEM_ALLOCATOR_PYMALLOC_DEBUG`` (``6``): :ref:`Python pymalloc - memory allocator ` with :ref:`debug hooks - `. - * ``PYMEM_ALLOCATOR_MIMALLOC`` (``6``): use ``mimalloc``, a fast - malloc replacement. - * ``PYMEM_ALLOCATOR_MIMALLOC_DEBUG`` (``7``): use ``mimalloc``, a fast - malloc replacement with :ref:`debug hooks `. + * Return ``0`` on success. + * Set an error in *config* and return ``-1`` on error. + If Python is initialized multiple times, ``PyInitConfig_AddModule()`` must + be called at each Python initialization. - ``PYMEM_ALLOCATOR_PYMALLOC`` and ``PYMEM_ALLOCATOR_PYMALLOC_DEBUG`` are - not supported if Python is :option:`configured using --without-pymalloc - <--without-pymalloc>`. + Similar to the :c:func:`PyImport_AppendInittab` function. - ``PYMEM_ALLOCATOR_MIMALLOC`` and ``PYMEM_ALLOCATOR_MIMALLOC_DEBUG`` are - not supported if Python is :option:`configured using --without-mimalloc - <--without-mimalloc>` or if the underlying atomic support isn't - available. - See :ref:`Memory Management `. +Initialize Python +----------------- - Default: ``PYMEM_ALLOCATOR_NOT_SET``. +.. c:function:: int Py_InitializeFromInitConfig(PyInitConfig *config) - .. c:member:: int configure_locale + Initialize Python from the initialization configuration. - Set the LC_CTYPE locale to the user preferred locale. + * Return ``0`` on success. + * Set an error in *config* and return ``-1`` on error. + * Set an exit code in *config* and return ``-1`` if Python wants to + exit. - If equals to ``0``, set :c:member:`~PyPreConfig.coerce_c_locale` and - :c:member:`~PyPreConfig.coerce_c_locale_warn` members to ``0``. + See ``PyInitConfig_GetExitcode()`` for the exit code case. - See the :term:`locale encoding`. - Default: ``1`` in Python config, ``0`` in isolated config. +.. _pyinitconfig-opts: + +Configuration Options +===================== + +.. 
list-table:: + :header-rows: 1 + + * - Option + - PyConfig/PyPreConfig member + - Type + - Visibility + * - ``"allocator"`` + - :c:member:`allocator ` + - ``int`` + - Read-only + * - ``"argv"`` + - :c:member:`argv ` + - ``list[str]`` + - Public + * - ``"base_exec_prefix"`` + - :c:member:`base_exec_prefix ` + - ``str`` + - Public + * - ``"base_executable"`` + - :c:member:`base_executable ` + - ``str`` + - Public + * - ``"base_prefix"`` + - :c:member:`base_prefix ` + - ``str`` + - Public + * - ``"buffered_stdio"`` + - :c:member:`buffered_stdio ` + - ``bool`` + - Read-only + * - ``"bytes_warning"`` + - :c:member:`bytes_warning ` + - ``int`` + - Public + * - ``"check_hash_pycs_mode"`` + - :c:member:`check_hash_pycs_mode ` + - ``str`` + - Read-only + * - ``"code_debug_ranges"`` + - :c:member:`code_debug_ranges ` + - ``bool`` + - Read-only + * - ``"coerce_c_locale"`` + - :c:member:`coerce_c_locale ` + - ``bool`` + - Read-only + * - ``"coerce_c_locale_warn"`` + - :c:member:`coerce_c_locale_warn ` + - ``bool`` + - Read-only + * - ``"configure_c_stdio"`` + - :c:member:`configure_c_stdio ` + - ``bool`` + - Read-only + * - ``"configure_locale"`` + - :c:member:`configure_locale ` + - ``bool`` + - Read-only + * - ``"cpu_count"`` + - :c:member:`cpu_count ` + - ``int`` + - Read-only + * - ``"dev_mode"`` + - :c:member:`dev_mode ` + - ``bool`` + - Read-only + * - ``"dump_refs"`` + - :c:member:`dump_refs ` + - ``bool`` + - Read-only + * - ``"dump_refs_file"`` + - :c:member:`dump_refs_file ` + - ``str`` + - Read-only + * - ``"exec_prefix"`` + - :c:member:`exec_prefix ` + - ``str`` + - Public + * - ``"executable"`` + - :c:member:`executable ` + - ``str`` + - Public + * - ``"faulthandler"`` + - :c:member:`faulthandler ` + - ``bool`` + - Read-only + * - ``"filesystem_encoding"`` + - :c:member:`filesystem_encoding ` + - ``str`` + - Read-only + * - ``"filesystem_errors"`` + - :c:member:`filesystem_errors ` + - ``str`` + - Read-only + * - ``"hash_seed"`` + - :c:member:`hash_seed ` + - ``int`` + - Read-only + * - ``"home"`` + - :c:member:`home ` + - ``str`` + - Read-only + * - ``"import_time"`` + - :c:member:`import_time ` + - ``bool`` + - Read-only + * - ``"inspect"`` + - :c:member:`inspect ` + - ``bool`` + - Public + * - ``"install_signal_handlers"`` + - :c:member:`install_signal_handlers ` + - ``bool`` + - Read-only + * - ``"int_max_str_digits"`` + - :c:member:`int_max_str_digits ` + - ``int`` + - Public + * - ``"interactive"`` + - :c:member:`interactive ` + - ``bool`` + - Public + * - ``"isolated"`` + - :c:member:`isolated ` + - ``bool`` + - Read-only + * - ``"legacy_windows_fs_encoding"`` + - :c:member:`legacy_windows_fs_encoding ` + - ``bool`` + - Read-only + * - ``"legacy_windows_stdio"`` + - :c:member:`legacy_windows_stdio ` + - ``bool`` + - Read-only + * - ``"malloc_stats"`` + - :c:member:`malloc_stats ` + - ``bool`` + - Read-only + * - ``"module_search_paths"`` + - :c:member:`module_search_paths ` + - ``list[str]`` + - Public + * - ``"optimization_level"`` + - :c:member:`optimization_level ` + - ``int`` + - Public + * - ``"orig_argv"`` + - :c:member:`orig_argv ` + - ``list[str]`` + - Read-only + * - ``"parse_argv"`` + - :c:member:`parse_argv ` + - ``bool`` + - Read-only + * - ``"parser_debug"`` + - :c:member:`parser_debug ` + - ``bool`` + - Public + * - ``"pathconfig_warnings"`` + - :c:member:`pathconfig_warnings ` + - ``bool`` + - Read-only + * - ``"perf_profiling"`` + - :c:member:`perf_profiling ` + - ``bool`` + - Read-only + * - ``"platlibdir"`` + - :c:member:`platlibdir ` + - ``str`` + - Public + * - 
``"prefix"`` + - :c:member:`prefix ` + - ``str`` + - Public + * - ``"program_name"`` + - :c:member:`program_name ` + - ``str`` + - Read-only + * - ``"pycache_prefix"`` + - :c:member:`pycache_prefix ` + - ``str`` + - Public + * - ``"quiet"`` + - :c:member:`quiet ` + - ``bool`` + - Public + * - ``"run_command"`` + - :c:member:`run_command ` + - ``str`` + - Read-only + * - ``"run_filename"`` + - :c:member:`run_filename ` + - ``str`` + - Read-only + * - ``"run_module"`` + - :c:member:`run_module ` + - ``str`` + - Read-only + * - ``"run_presite"`` + - :c:member:`run_presite ` + - ``str`` + - Read-only + * - ``"safe_path"`` + - :c:member:`safe_path ` + - ``bool`` + - Read-only + * - ``"show_ref_count"`` + - :c:member:`show_ref_count ` + - ``bool`` + - Read-only + * - ``"site_import"`` + - :c:member:`site_import ` + - ``bool`` + - Read-only + * - ``"skip_source_first_line"`` + - :c:member:`skip_source_first_line ` + - ``bool`` + - Read-only + * - ``"stdio_encoding"`` + - :c:member:`stdio_encoding ` + - ``str`` + - Read-only + * - ``"stdio_errors"`` + - :c:member:`stdio_errors ` + - ``str`` + - Read-only + * - ``"stdlib_dir"`` + - :c:member:`stdlib_dir ` + - ``str`` + - Public + * - ``"tracemalloc"`` + - :c:member:`tracemalloc ` + - ``int`` + - Read-only + * - ``"use_environment"`` + - :c:member:`use_environment ` + - ``bool`` + - Public + * - ``"use_frozen_modules"`` + - :c:member:`use_frozen_modules ` + - ``bool`` + - Read-only + * - ``"use_hash_seed"`` + - :c:member:`use_hash_seed ` + - ``bool`` + - Read-only + * - ``"user_site_directory"`` + - :c:member:`user_site_directory ` + - ``bool`` + - Read-only + * - ``"utf8_mode"`` + - :c:member:`utf8_mode ` + - ``bool`` + - Read-only + * - ``"verbose"`` + - :c:member:`verbose ` + - ``int`` + - Public + * - ``"warn_default_encoding"`` + - :c:member:`warn_default_encoding ` + - ``bool`` + - Read-only + * - ``"warnoptions"`` + - :c:member:`warnoptions ` + - ``list[str]`` + - Public + * - ``"write_bytecode"`` + - :c:member:`write_bytecode ` + - ``bool`` + - Public + * - ``"xoptions"`` + - :c:member:`xoptions ` + - ``dict[str, str]`` + - Public + * - ``"_pystats"`` + - :c:member:`_pystats ` + - ``bool`` + - Read-only + +Visibility: + +* Public: Can by get by :c:func:`PyConfig_Get` and set by + :c:func:`PyConfig_Set`. +* Read-only: Can by get by :c:func:`PyConfig_Get`, but cannot be set by + :c:func:`PyConfig_Set`. - .. c:member:: int coerce_c_locale - If equals to ``2``, coerce the C locale. +Runtime Python configuration API +================================ - If equals to ``1``, read the LC_CTYPE locale to decide if it should be - coerced. +At runtime, it's possible to get and set configuration options using +:c:func:`PyConfig_Get` and :c:func:`PyConfig_Set` functions. - See the :term:`locale encoding`. +The configuration option *name* parameter must be a non-NULL null-terminated +UTF-8 encoded string. See :ref:`Configuration Options `. - Default: ``-1`` in Python config, ``0`` in isolated config. +Some options are read from the :mod:`sys` attributes. For example, the option +``"argv"`` is read from :data:`sys.argv`. - .. c:member:: int coerce_c_locale_warn - If non-zero, emit a warning if the C locale is coerced. +.. c:function:: PyObject* PyConfig_Get(const char *name) - Default: ``-1`` in Python config, ``0`` in isolated config. + Get the current runtime value of a configuration option as a Python object. - .. c:member:: int dev_mode + * Return a new reference on success. + * Set an exception and return ``NULL`` on error. 
- :ref:`Python Development Mode `: see - :c:member:`PyConfig.dev_mode`. + The object type depends on the configuration option. It can be: - Default: ``-1`` in Python mode, ``0`` in isolated mode. + * ``bool`` + * ``int`` + * ``str`` + * ``list[str]`` + * ``dict[str, str]`` - .. c:member:: int isolated + The caller must hold the GIL. The function cannot be called before + Python initialization nor after Python finalization. - Isolated mode: see :c:member:`PyConfig.isolated`. + .. versionadded:: 3.14 - Default: ``0`` in Python mode, ``1`` in isolated mode. - .. c:member:: int legacy_windows_fs_encoding +.. c:function:: int PyConfig_GetInt(const char *name, int *value) - If non-zero: + Similar to :c:func:`PyConfig_Get`, but get the value as a C int. - * Set :c:member:`PyPreConfig.utf8_mode` to ``0``, - * Set :c:member:`PyConfig.filesystem_encoding` to ``"mbcs"``, - * Set :c:member:`PyConfig.filesystem_errors` to ``"replace"``. + * Return ``0`` on success. + * Set an exception and return ``-1`` on error. - Initialized from the :envvar:`PYTHONLEGACYWINDOWSFSENCODING` environment - variable value. + .. versionadded:: 3.14 - Only available on Windows. ``#ifdef MS_WINDOWS`` macro can be used for - Windows specific code. - Default: ``0``. +.. c:function:: PyObject* PyConfig_Names(void) - .. c:member:: int parse_argv + Get all configuration option names as a ``frozenset``. - If non-zero, :c:func:`Py_PreInitializeFromArgs` and - :c:func:`Py_PreInitializeFromBytesArgs` parse their ``argv`` argument the - same way the regular Python parses command line arguments: see - :ref:`Command Line Arguments `. + * Return a new reference on success. + * Set an exception and return ``NULL`` on error. - Default: ``1`` in Python config, ``0`` in isolated config. + The caller must hold the GIL. The function cannot be called before + Python initialization nor after Python finalization. - .. c:member:: int use_environment + .. versionadded:: 3.14 - Use :ref:`environment variables `? See - :c:member:`PyConfig.use_environment`. - Default: ``1`` in Python config and ``0`` in isolated config. +.. c:function:: int PyConfig_Set(const char *name, PyObject *value) - .. c:member:: int utf8_mode + Set the current runtime value of a configuration option. - If non-zero, enable the :ref:`Python UTF-8 Mode `. + * Raise a :exc:`ValueError` if there is no option *name*. + * Raise a :exc:`ValueError` if *value* is an invalid value. + * Raise a :exc:`ValueError` if the option is read-only (cannot be set). + * Raise a :exc:`TypeError` if *value* has not the proper type. - Set to ``0`` or ``1`` by the :option:`-X utf8 <-X>` command line option - and the :envvar:`PYTHONUTF8` environment variable. + The caller must hold the GIL. The function cannot be called before + Python initialization nor after Python finalization. - Also set to ``1`` if the ``LC_CTYPE`` locale is ``C`` or ``POSIX``. + .. versionadded:: 3.14 - Default: ``-1`` in Python config and ``0`` in isolated config. +.. _pyconfig_api: -.. _c-preinit: +PyConfig C API +============== -Preinitialize Python with PyPreConfig -------------------------------------- +.. versionadded:: 3.8 -The preinitialization of Python: +Python can be initialized with :c:func:`Py_InitializeFromConfig` and the +:c:type:`PyConfig` structure. It can be preinitialized with +:c:func:`Py_PreInitialize` and the :c:type:`PyPreConfig` structure. 
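+In its smallest form, that flow can look as follows (a sketch only; the
+complete example below shows fuller error handling)::
+
+    PyConfig config;
+    PyConfig_InitPythonConfig(&config);
+
+    PyStatus status = Py_InitializeFromConfig(&config);
+    PyConfig_Clear(&config);
+    if (PyStatus_Exception(status)) {
+        Py_ExitStatusException(status);
+    }
+
+    /* ... use the Python C API here ... */
+
+    Py_Finalize();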
-* Set the Python memory allocators (:c:member:`PyPreConfig.allocator`) -* Configure the LC_CTYPE locale (:term:`locale encoding`) -* Set the :ref:`Python UTF-8 Mode ` - (:c:member:`PyPreConfig.utf8_mode`) +There are two kinds of configuration: -The current preconfiguration (``PyPreConfig`` type) is stored in -``_PyRuntime.preconfig``. +* The :ref:`Python Configuration ` can be used to build a + customized Python which behaves as the regular Python. For example, + environment variables and command line arguments are used to configure + Python. -Functions to preinitialize Python: +* The :ref:`Isolated Configuration ` can be used to embed + Python into an application. It isolates Python from the system. For example, + environment variables are ignored, the LC_CTYPE locale is left unchanged and + no signal handler is registered. -.. c:function:: PyStatus Py_PreInitialize(const PyPreConfig *preconfig) +The :c:func:`Py_RunMain` function can be used to write a customized Python +program. - Preinitialize Python from *preconfig* preconfiguration. +See also :ref:`Initialization, Finalization, and Threads `. - *preconfig* must not be ``NULL``. +.. seealso:: + :pep:`587` "Python Initialization Configuration". -.. c:function:: PyStatus Py_PreInitializeFromBytesArgs(const PyPreConfig *preconfig, int argc, char * const *argv) - Preinitialize Python from *preconfig* preconfiguration. +Example +------- - Parse *argv* command line arguments (bytes strings) if - :c:member:`~PyPreConfig.parse_argv` of *preconfig* is non-zero. +Example of customized Python always running in isolated mode:: - *preconfig* must not be ``NULL``. + int main(int argc, char **argv) + { + PyStatus status; -.. c:function:: PyStatus Py_PreInitializeFromArgs(const PyPreConfig *preconfig, int argc, wchar_t * const * argv) + PyConfig config; + PyConfig_InitPythonConfig(&config); + config.isolated = 1; - Preinitialize Python from *preconfig* preconfiguration. + /* Decode command line arguments. + Implicitly preinitialize Python (in isolated mode). */ + status = PyConfig_SetBytesArgv(&config, argc, argv); + if (PyStatus_Exception(status)) { + goto exception; + } - Parse *argv* command line arguments (wide strings) if - :c:member:`~PyPreConfig.parse_argv` of *preconfig* is non-zero. + status = Py_InitializeFromConfig(&config); + if (PyStatus_Exception(status)) { + goto exception; + } + PyConfig_Clear(&config); - *preconfig* must not be ``NULL``. + return Py_RunMain(); -The caller is responsible to handle exceptions (error or exit) using -:c:func:`PyStatus_Exception` and :c:func:`Py_ExitStatusException`. + exception: + PyConfig_Clear(&config); + if (PyStatus_IsExit(status)) { + return status.exitcode; + } + /* Display the error message and exit the process with + non-zero exit code */ + Py_ExitStatusException(status); + } -For :ref:`Python Configuration ` -(:c:func:`PyPreConfig_InitPythonConfig`), if Python is initialized with -command line arguments, the command line arguments must also be passed to -preinitialize Python, since they have an effect on the pre-configuration -like encodings. For example, the :option:`-X utf8 <-X>` command line option -enables the :ref:`Python UTF-8 Mode `. -``PyMem_SetAllocator()`` can be called after :c:func:`Py_PreInitialize` and -before :c:func:`Py_InitializeFromConfig` to install a custom memory allocator. -It can be called before :c:func:`Py_PreInitialize` if -:c:member:`PyPreConfig.allocator` is set to ``PYMEM_ALLOCATOR_NOT_SET``. 
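+Once initialization has succeeded, the effect of such settings can be checked
+with the runtime configuration API described above. A minimal sketch (assuming
+the GIL is held and errors are merely reported)::
+
+    int isolated;
+    if (PyConfig_GetInt("isolated", &isolated) < 0) {
+        PyErr_Print();
+    }
+    else {
+        printf("isolated = %d\n", isolated);
+    }
+
+    /* "argv" is returned as a new reference to a Python list. */
+    PyObject *argv = PyConfig_Get("argv");
+    if (argv == NULL) {
+        PyErr_Print();
+    }
+    Py_XDECREF(argv);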
+PyWideStringList +---------------- -Python memory allocation functions like :c:func:`PyMem_RawMalloc` must not be -used before the Python preinitialization, whereas calling directly ``malloc()`` -and ``free()`` is always safe. :c:func:`Py_DecodeLocale` must not be called -before the Python preinitialization. +.. c:type:: PyWideStringList -Example using the preinitialization to enable -the :ref:`Python UTF-8 Mode `:: + List of ``wchar_t*`` strings. - PyStatus status; - PyPreConfig preconfig; - PyPreConfig_InitPythonConfig(&preconfig); + If *length* is non-zero, *items* must be non-``NULL`` and all strings must be + non-``NULL``. - preconfig.utf8_mode = 1; + .. c:namespace:: NULL - status = Py_PreInitialize(&preconfig); - if (PyStatus_Exception(status)) { - Py_ExitStatusException(status); - } + Methods: - /* at this point, Python speaks UTF-8 */ + .. c:function:: PyStatus PyWideStringList_Append(PyWideStringList *list, const wchar_t *item) - Py_Initialize(); - /* ... use Python API here ... */ - Py_Finalize(); + Append *item* to *list*. + Python must be preinitialized to call this function. -PyConfig --------- + .. c:function:: PyStatus PyWideStringList_Insert(PyWideStringList *list, Py_ssize_t index, const wchar_t *item) -.. c:type:: PyConfig + Insert *item* into *list* at *index*. - Structure containing most parameters to configure Python. + If *index* is greater than or equal to *list* length, append *item* to + *list*. - When done, the :c:func:`PyConfig_Clear` function must be used to release the - configuration memory. + *index* must be greater than or equal to ``0``. - .. c:namespace:: NULL + Python must be preinitialized to call this function. - Structure methods: + .. c:namespace:: PyWideStringList - .. c:function:: void PyConfig_InitPythonConfig(PyConfig *config) + Structure fields: - Initialize configuration with the :ref:`Python Configuration - `. + .. c:member:: Py_ssize_t length - .. c:function:: void PyConfig_InitIsolatedConfig(PyConfig *config) + List length. - Initialize configuration with the :ref:`Isolated Configuration - `. + .. c:member:: wchar_t** items - .. c:function:: PyStatus PyConfig_SetString(PyConfig *config, wchar_t * const *config_str, const wchar_t *str) + List items. - Copy the wide character string *str* into ``*config_str``. +PyStatus +-------- - :ref:`Preinitialize Python ` if needed. +.. c:type:: PyStatus - .. c:function:: PyStatus PyConfig_SetBytesString(PyConfig *config, wchar_t * const *config_str, const char *str) + Structure to store an initialization function status: success, error + or exit. - Decode *str* using :c:func:`Py_DecodeLocale` and set the result into - ``*config_str``. + For an error, it can store the C function name which created the error. - :ref:`Preinitialize Python ` if needed. + Structure fields: - .. c:function:: PyStatus PyConfig_SetArgv(PyConfig *config, int argc, wchar_t * const *argv) + .. c:member:: int exitcode - Set command line arguments (:c:member:`~PyConfig.argv` member of - *config*) from the *argv* list of wide character strings. + Exit code. Argument passed to ``exit()``. - :ref:`Preinitialize Python ` if needed. + .. c:member:: const char *err_msg - .. c:function:: PyStatus PyConfig_SetBytesArgv(PyConfig *config, int argc, char * const *argv) + Error message. - Set command line arguments (:c:member:`~PyConfig.argv` member of - *config*) from the *argv* list of bytes strings. Decode bytes using - :c:func:`Py_DecodeLocale`. + .. c:member:: const char *func - :ref:`Preinitialize Python ` if needed. 
+ Name of the function which created an error, can be ``NULL``. - .. c:function:: PyStatus PyConfig_SetWideStringList(PyConfig *config, PyWideStringList *list, Py_ssize_t length, wchar_t **items) + .. c:namespace:: NULL - Set the list of wide strings *list* to *length* and *items*. + Functions to create a status: - :ref:`Preinitialize Python ` if needed. + .. c:function:: PyStatus PyStatus_Ok(void) - .. c:function:: PyStatus PyConfig_Read(PyConfig *config) + Success. - Read all Python configuration. + .. c:function:: PyStatus PyStatus_Error(const char *err_msg) - Fields which are already initialized are left unchanged. + Initialization error with a message. - Fields for :ref:`path configuration ` are no longer - calculated or modified when calling this function, as of Python 3.11. + *err_msg* must not be ``NULL``. - The :c:func:`PyConfig_Read` function only parses - :c:member:`PyConfig.argv` arguments once: :c:member:`PyConfig.parse_argv` - is set to ``2`` after arguments are parsed. Since Python arguments are - stripped from :c:member:`PyConfig.argv`, parsing arguments twice would - parse the application options as Python options. + .. c:function:: PyStatus PyStatus_NoMemory(void) - :ref:`Preinitialize Python ` if needed. + Memory allocation failure (out of memory). - .. versionchanged:: 3.10 - The :c:member:`PyConfig.argv` arguments are now only parsed once, - :c:member:`PyConfig.parse_argv` is set to ``2`` after arguments are - parsed, and arguments are only parsed if - :c:member:`PyConfig.parse_argv` equals ``1``. + .. c:function:: PyStatus PyStatus_Exit(int exitcode) - .. versionchanged:: 3.11 - :c:func:`PyConfig_Read` no longer calculates all paths, and so fields - listed under :ref:`Python Path Configuration ` may - no longer be updated until :c:func:`Py_InitializeFromConfig` is - called. + Exit Python with the specified exit code. - .. c:function:: void PyConfig_Clear(PyConfig *config) + Functions to handle a status: - Release configuration memory. + .. c:function:: int PyStatus_Exception(PyStatus status) - Most ``PyConfig`` methods :ref:`preinitialize Python ` if needed. - In that case, the Python preinitialization configuration - (:c:type:`PyPreConfig`) in based on the :c:type:`PyConfig`. If configuration - fields which are in common with :c:type:`PyPreConfig` are tuned, they must - be set before calling a :c:type:`PyConfig` method: + Is the status an error or an exit? If true, the exception must be + handled; by calling :c:func:`Py_ExitStatusException` for example. - * :c:member:`PyConfig.dev_mode` - * :c:member:`PyConfig.isolated` - * :c:member:`PyConfig.parse_argv` - * :c:member:`PyConfig.use_environment` + .. c:function:: int PyStatus_IsError(PyStatus status) - Moreover, if :c:func:`PyConfig_SetArgv` or :c:func:`PyConfig_SetBytesArgv` - is used, this method must be called before other methods, since the - preinitialization configuration depends on command line arguments (if - :c:member:`~PyConfig.parse_argv` is non-zero). + Is the result an error? - The caller of these methods is responsible to handle exceptions (error or - exit) using ``PyStatus_Exception()`` and ``Py_ExitStatusException()``. + .. c:function:: int PyStatus_IsExit(PyStatus status) - .. c:namespace:: PyConfig + Is the result an exit? - Structure fields: + .. c:function:: void Py_ExitStatusException(PyStatus status) - .. c:member:: PyWideStringList argv + Call ``exit(exitcode)`` if *status* is an exit. Print the error + message and exit with a non-zero exit code if *status* is an error. 
Must + only be called if ``PyStatus_Exception(status)`` is non-zero. - .. index:: - single: main() - single: argv (in module sys) +.. note:: + Internally, Python uses macros which set ``PyStatus.func``, + whereas functions to create a status set ``func`` to ``NULL``. - Set :data:`sys.argv` command line arguments based on - :c:member:`~PyConfig.argv`. These parameters are similar to those passed - to the program's :c:func:`main` function with the difference that the - first entry should refer to the script file to be executed rather than - the executable hosting the Python interpreter. If there isn't a script - that will be run, the first entry in :c:member:`~PyConfig.argv` can be an - empty string. +Example:: - Set :c:member:`~PyConfig.parse_argv` to ``1`` to parse - :c:member:`~PyConfig.argv` the same way the regular Python parses Python - command line arguments and then to strip Python arguments from - :c:member:`~PyConfig.argv`. + PyStatus alloc(void **ptr, size_t size) + { + *ptr = PyMem_RawMalloc(size); + if (*ptr == NULL) { + return PyStatus_NoMemory(); + } + return PyStatus_Ok(); + } - If :c:member:`~PyConfig.argv` is empty, an empty string is added to - ensure that :data:`sys.argv` always exists and is never empty. + int main(int argc, char **argv) + { + void *ptr; + PyStatus status = alloc(&ptr, 16); + if (PyStatus_Exception(status)) { + Py_ExitStatusException(status); + } + PyMem_Free(ptr); + return 0; + } - Default: ``NULL``. - See also the :c:member:`~PyConfig.orig_argv` member. +PyPreConfig +----------- - .. c:member:: int safe_path +.. c:type:: PyPreConfig - If equals to zero, ``Py_RunMain()`` prepends a potentially unsafe path to - :data:`sys.path` at startup: + Structure used to preinitialize Python. - * If :c:member:`argv[0] ` is equal to ``L"-m"`` - (``python -m module``), prepend the current working directory. - * If running a script (``python script.py``), prepend the script's - directory. If it's a symbolic link, resolve symbolic links. - * Otherwise (``python -c code`` and ``python``), prepend an empty string, - which means the current working directory. + .. c:namespace:: NULL - Set to ``1`` by the :option:`-P` command line option and the - :envvar:`PYTHONSAFEPATH` environment variable. + Function to initialize a preconfiguration: - Default: ``0`` in Python config, ``1`` in isolated config. + .. c:function:: void PyPreConfig_InitPythonConfig(PyPreConfig *preconfig) - .. versionadded:: 3.11 + Initialize the preconfiguration with :ref:`Python Configuration + `. - .. c:member:: wchar_t* base_exec_prefix + .. c:function:: void PyPreConfig_InitIsolatedConfig(PyPreConfig *preconfig) - :data:`sys.base_exec_prefix`. + Initialize the preconfiguration with :ref:`Isolated Configuration + `. - Default: ``NULL``. + .. c:namespace:: PyPreConfig - Part of the :ref:`Python Path Configuration ` output. + Structure fields: - See also :c:member:`PyConfig.exec_prefix`. + .. c:member:: int allocator - .. c:member:: wchar_t* base_executable + Name of the Python memory allocators: - Python base executable: :data:`sys._base_executable`. + * ``PYMEM_ALLOCATOR_NOT_SET`` (``0``): don't change memory allocators + (use defaults). + * ``PYMEM_ALLOCATOR_DEFAULT`` (``1``): :ref:`default memory allocators + `. + * ``PYMEM_ALLOCATOR_DEBUG`` (``2``): :ref:`default memory allocators + ` with :ref:`debug hooks + `. + * ``PYMEM_ALLOCATOR_MALLOC`` (``3``): use ``malloc()`` of the C library. + * ``PYMEM_ALLOCATOR_MALLOC_DEBUG`` (``4``): force usage of + ``malloc()`` with :ref:`debug hooks `. 
+ * ``PYMEM_ALLOCATOR_PYMALLOC`` (``5``): :ref:`Python pymalloc memory + allocator `. + * ``PYMEM_ALLOCATOR_PYMALLOC_DEBUG`` (``6``): :ref:`Python pymalloc + memory allocator ` with :ref:`debug hooks + `. + * ``PYMEM_ALLOCATOR_MIMALLOC`` (``7``): use ``mimalloc``, a fast + malloc replacement. + * ``PYMEM_ALLOCATOR_MIMALLOC_DEBUG`` (``8``): use ``mimalloc``, a fast + malloc replacement with :ref:`debug hooks `. - Set by the :envvar:`__PYVENV_LAUNCHER__` environment variable. - Set from :c:member:`PyConfig.executable` if ``NULL``. + ``PYMEM_ALLOCATOR_PYMALLOC`` and ``PYMEM_ALLOCATOR_PYMALLOC_DEBUG`` are + not supported if Python is :option:`configured using --without-pymalloc + <--without-pymalloc>`. - Default: ``NULL``. + ``PYMEM_ALLOCATOR_MIMALLOC`` and ``PYMEM_ALLOCATOR_MIMALLOC_DEBUG`` are + not supported if Python is :option:`configured using --without-mimalloc + <--without-mimalloc>` or if the underlying atomic support isn't + available. - Part of the :ref:`Python Path Configuration ` output. + See :ref:`Memory Management `. - See also :c:member:`PyConfig.executable`. + Default: ``PYMEM_ALLOCATOR_NOT_SET``. - .. c:member:: wchar_t* base_prefix + .. c:member:: int configure_locale - :data:`sys.base_prefix`. + Set the LC_CTYPE locale to the user preferred locale. - Default: ``NULL``. + If equals to ``0``, set :c:member:`~PyPreConfig.coerce_c_locale` and + :c:member:`~PyPreConfig.coerce_c_locale_warn` members to ``0``. - Part of the :ref:`Python Path Configuration ` output. + See the :term:`locale encoding`. - See also :c:member:`PyConfig.prefix`. + Default: ``1`` in Python config, ``0`` in isolated config. - .. c:member:: int buffered_stdio + .. c:member:: int coerce_c_locale - If equals to ``0`` and :c:member:`~PyConfig.configure_c_stdio` is non-zero, - disable buffering on the C streams stdout and stderr. + If equals to ``2``, coerce the C locale. - Set to ``0`` by the :option:`-u` command line option and the - :envvar:`PYTHONUNBUFFERED` environment variable. + If equals to ``1``, read the LC_CTYPE locale to decide if it should be + coerced. - stdin is always opened in buffered mode. + See the :term:`locale encoding`. - Default: ``1``. + Default: ``-1`` in Python config, ``0`` in isolated config. - .. c:member:: int bytes_warning + .. c:member:: int coerce_c_locale_warn - If equals to ``1``, issue a warning when comparing :class:`bytes` or - :class:`bytearray` with :class:`str`, or comparing :class:`bytes` with - :class:`int`. + If non-zero, emit a warning if the C locale is coerced. - If equal or greater to ``2``, raise a :exc:`BytesWarning` exception in these - cases. + Default: ``-1`` in Python config, ``0`` in isolated config. - Incremented by the :option:`-b` command line option. + .. c:member:: int dev_mode - Default: ``0``. + :ref:`Python Development Mode `: see + :c:member:`PyConfig.dev_mode`. - .. c:member:: int warn_default_encoding + Default: ``-1`` in Python mode, ``0`` in isolated mode. - If non-zero, emit a :exc:`EncodingWarning` warning when :class:`io.TextIOWrapper` - uses its default encoding. See :ref:`io-encoding-warning` for details. + .. c:member:: int isolated - Default: ``0``. + Isolated mode: see :c:member:`PyConfig.isolated`. - .. versionadded:: 3.10 + Default: ``0`` in Python mode, ``1`` in isolated mode. - .. c:member:: int code_debug_ranges + .. c:member:: int legacy_windows_fs_encoding - If equals to ``0``, disables the inclusion of the end line and column - mappings in code objects. Also disables traceback printing carets to - specific error locations.
+ If non-zero: - Set to ``0`` by the :envvar:`PYTHONNODEBUGRANGES` environment variable - and by the :option:`-X no_debug_ranges <-X>` command line option. + * Set :c:member:`PyPreConfig.utf8_mode` to ``0``, + * Set :c:member:`PyConfig.filesystem_encoding` to ``"mbcs"``, + * Set :c:member:`PyConfig.filesystem_errors` to ``"replace"``. - Default: ``1``. + Initialized from the :envvar:`PYTHONLEGACYWINDOWSFSENCODING` environment + variable value. - .. versionadded:: 3.11 + Only available on Windows. ``#ifdef MS_WINDOWS`` macro can be used for + Windows specific code. - .. c:member:: wchar_t* check_hash_pycs_mode + Default: ``0``. - Control the validation behavior of hash-based ``.pyc`` files: - value of the :option:`--check-hash-based-pycs` command line option. + .. c:member:: int parse_argv - Valid values: + If non-zero, :c:func:`Py_PreInitializeFromArgs` and + :c:func:`Py_PreInitializeFromBytesArgs` parse their ``argv`` argument the + same way the regular Python parses command line arguments: see + :ref:`Command Line Arguments `. - - ``L"always"``: Hash the source file for invalidation regardless of - value of the 'check_source' flag. - - ``L"never"``: Assume that hash-based pycs always are valid. - - ``L"default"``: The 'check_source' flag in hash-based pycs - determines invalidation. + Default: ``1`` in Python config, ``0`` in isolated config. - Default: ``L"default"``. + .. c:member:: int use_environment - See also :pep:`552` "Deterministic pycs". + Use :ref:`environment variables `? See + :c:member:`PyConfig.use_environment`. - .. c:member:: int configure_c_stdio + Default: ``1`` in Python config and ``0`` in isolated config. - If non-zero, configure C standard streams: + .. c:member:: int utf8_mode - * On Windows, set the binary mode (``O_BINARY``) on stdin, stdout and - stderr. - * If :c:member:`~PyConfig.buffered_stdio` equals zero, disable buffering - of stdin, stdout and stderr streams. - * If :c:member:`~PyConfig.interactive` is non-zero, enable stream - buffering on stdin and stdout (only stdout on Windows). + If non-zero, enable the :ref:`Python UTF-8 Mode `. - Default: ``1`` in Python config, ``0`` in isolated config. + Set to ``0`` or ``1`` by the :option:`-X utf8 <-X>` command line option + and the :envvar:`PYTHONUTF8` environment variable. - .. c:member:: int dev_mode + Also set to ``1`` if the ``LC_CTYPE`` locale is ``C`` or ``POSIX``. - If non-zero, enable the :ref:`Python Development Mode `. + Default: ``-1`` in Python config and ``0`` in isolated config. - Set to ``1`` by the :option:`-X dev <-X>` option and the - :envvar:`PYTHONDEVMODE` environment variable. - Default: ``-1`` in Python mode, ``0`` in isolated mode. +.. _c-preinit: - .. c:member:: int dump_refs +Preinitialize Python with PyPreConfig +------------------------------------- - Dump Python references? +The preinitialization of Python: - If non-zero, dump all objects which are still alive at exit. +* Set the Python memory allocators (:c:member:`PyPreConfig.allocator`) +* Configure the LC_CTYPE locale (:term:`locale encoding`) +* Set the :ref:`Python UTF-8 Mode ` + (:c:member:`PyPreConfig.utf8_mode`) - Set to ``1`` by the :envvar:`PYTHONDUMPREFS` environment variable. +The current preconfiguration (``PyPreConfig`` type) is stored in +``_PyRuntime.preconfig``. - Needs a special build of Python with the ``Py_TRACE_REFS`` macro defined: - see the :option:`configure --with-trace-refs option <--with-trace-refs>`. +Functions to preinitialize Python: - Default: ``0``. +.. 
c:function:: PyStatus Py_PreInitialize(const PyPreConfig *preconfig) - .. c:member:: wchar_t* exec_prefix + Preinitialize Python from *preconfig* preconfiguration. - The site-specific directory prefix where the platform-dependent Python - files are installed: :data:`sys.exec_prefix`. + *preconfig* must not be ``NULL``. - Default: ``NULL``. +.. c:function:: PyStatus Py_PreInitializeFromBytesArgs(const PyPreConfig *preconfig, int argc, char * const *argv) - Part of the :ref:`Python Path Configuration ` output. + Preinitialize Python from *preconfig* preconfiguration. - See also :c:member:`PyConfig.base_exec_prefix`. + Parse *argv* command line arguments (bytes strings) if + :c:member:`~PyPreConfig.parse_argv` of *preconfig* is non-zero. - .. c:member:: wchar_t* executable + *preconfig* must not be ``NULL``. - The absolute path of the executable binary for the Python interpreter: - :data:`sys.executable`. +.. c:function:: PyStatus Py_PreInitializeFromArgs(const PyPreConfig *preconfig, int argc, wchar_t * const * argv) - Default: ``NULL``. + Preinitialize Python from *preconfig* preconfiguration. - Part of the :ref:`Python Path Configuration ` output. + Parse *argv* command line arguments (wide strings) if + :c:member:`~PyPreConfig.parse_argv` of *preconfig* is non-zero. - See also :c:member:`PyConfig.base_executable`. + *preconfig* must not be ``NULL``. - .. c:member:: int faulthandler +The caller is responsible to handle exceptions (error or exit) using +:c:func:`PyStatus_Exception` and :c:func:`Py_ExitStatusException`. - Enable faulthandler? +For :ref:`Python Configuration ` +(:c:func:`PyPreConfig_InitPythonConfig`), if Python is initialized with +command line arguments, the command line arguments must also be passed to +preinitialize Python, since they have an effect on the pre-configuration +like encodings. For example, the :option:`-X utf8 <-X>` command line option +enables the :ref:`Python UTF-8 Mode `. - If non-zero, call :func:`faulthandler.enable` at startup. +``PyMem_SetAllocator()`` can be called after :c:func:`Py_PreInitialize` and +before :c:func:`Py_InitializeFromConfig` to install a custom memory allocator. +It can be called before :c:func:`Py_PreInitialize` if +:c:member:`PyPreConfig.allocator` is set to ``PYMEM_ALLOCATOR_NOT_SET``. - Set to ``1`` by :option:`-X faulthandler <-X>` and the - :envvar:`PYTHONFAULTHANDLER` environment variable. +Python memory allocation functions like :c:func:`PyMem_RawMalloc` must not be +used before the Python preinitialization, whereas calling directly ``malloc()`` +and ``free()`` is always safe. :c:func:`Py_DecodeLocale` must not be called +before the Python preinitialization. - Default: ``-1`` in Python mode, ``0`` in isolated mode. +Example using the preinitialization to enable +the :ref:`Python UTF-8 Mode `:: - .. c:member:: wchar_t* filesystem_encoding + PyStatus status; + PyPreConfig preconfig; + PyPreConfig_InitPythonConfig(&preconfig); - :term:`Filesystem encoding `: - :func:`sys.getfilesystemencoding`. + preconfig.utf8_mode = 1; - On macOS, Android and VxWorks: use ``"utf-8"`` by default. + status = Py_PreInitialize(&preconfig); + if (PyStatus_Exception(status)) { + Py_ExitStatusException(status); + } - On Windows: use ``"utf-8"`` by default, or ``"mbcs"`` if - :c:member:`~PyPreConfig.legacy_windows_fs_encoding` of - :c:type:`PyPreConfig` is non-zero. + /* at this point, Python speaks UTF-8 */ - Default encoding on other platforms: + Py_Initialize(); + /* ... use Python API here ... 
*/ + Py_Finalize(); - * ``"utf-8"`` if :c:member:`PyPreConfig.utf8_mode` is non-zero. - * ``"ascii"`` if Python detects that ``nl_langinfo(CODESET)`` announces - the ASCII encoding, whereas the ``mbstowcs()`` function - decodes from a different encoding (usually Latin1). - * ``"utf-8"`` if ``nl_langinfo(CODESET)`` returns an empty string. - * Otherwise, use the :term:`locale encoding`: - ``nl_langinfo(CODESET)`` result. - At Python startup, the encoding name is normalized to the Python codec - name. For example, ``"ANSI_X3.4-1968"`` is replaced with ``"ascii"``. +PyConfig +-------- - See also the :c:member:`~PyConfig.filesystem_errors` member. +.. c:type:: PyConfig - .. c:member:: wchar_t* filesystem_errors + Structure containing most parameters to configure Python. - :term:`Filesystem error handler `: - :func:`sys.getfilesystemencodeerrors`. + When done, the :c:func:`PyConfig_Clear` function must be used to release the + configuration memory. - On Windows: use ``"surrogatepass"`` by default, or ``"replace"`` if - :c:member:`~PyPreConfig.legacy_windows_fs_encoding` of - :c:type:`PyPreConfig` is non-zero. + .. c:namespace:: NULL - On other platforms: use ``"surrogateescape"`` by default. + Structure methods: - Supported error handlers: + .. c:function:: void PyConfig_InitPythonConfig(PyConfig *config) - * ``"strict"`` - * ``"surrogateescape"`` - * ``"surrogatepass"`` (only supported with the UTF-8 encoding) + Initialize configuration with the :ref:`Python Configuration + `. - See also the :c:member:`~PyConfig.filesystem_encoding` member. + .. c:function:: void PyConfig_InitIsolatedConfig(PyConfig *config) - .. c:member:: unsigned long hash_seed - .. c:member:: int use_hash_seed + Initialize configuration with the :ref:`Isolated Configuration + `. - Randomized hash function seed. + .. c:function:: PyStatus PyConfig_SetString(PyConfig *config, wchar_t * const *config_str, const wchar_t *str) - If :c:member:`~PyConfig.use_hash_seed` is zero, a seed is chosen randomly - at Python startup, and :c:member:`~PyConfig.hash_seed` is ignored. + Copy the wide character string *str* into ``*config_str``. - Set by the :envvar:`PYTHONHASHSEED` environment variable. + :ref:`Preinitialize Python ` if needed. - Default *use_hash_seed* value: ``-1`` in Python mode, ``0`` in isolated - mode. + .. c:function:: PyStatus PyConfig_SetBytesString(PyConfig *config, wchar_t * const *config_str, const char *str) - .. c:member:: wchar_t* home + Decode *str* using :c:func:`Py_DecodeLocale` and set the result into + ``*config_str``. - Set the default Python "home" directory, that is, the location of the - standard Python libraries (see :envvar:`PYTHONHOME`). + :ref:`Preinitialize Python ` if needed. - Set by the :envvar:`PYTHONHOME` environment variable. + .. c:function:: PyStatus PyConfig_SetArgv(PyConfig *config, int argc, wchar_t * const *argv) - Default: ``NULL``. + Set command line arguments (:c:member:`~PyConfig.argv` member of + *config*) from the *argv* list of wide character strings. - Part of the :ref:`Python Path Configuration ` input. + :ref:`Preinitialize Python ` if needed. - .. c:member:: int import_time + .. c:function:: PyStatus PyConfig_SetBytesArgv(PyConfig *config, int argc, char * const *argv) - If non-zero, profile import time. + Set command line arguments (:c:member:`~PyConfig.argv` member of + *config*) from the *argv* list of bytes strings. Decode bytes using + :c:func:`Py_DecodeLocale`. 
- Set the ``1`` by the :option:`-X importtime <-X>` option and the - :envvar:`PYTHONPROFILEIMPORTTIME` environment variable. + :ref:`Preinitialize Python ` if needed. - Default: ``0``. + .. c:function:: PyStatus PyConfig_SetWideStringList(PyConfig *config, PyWideStringList *list, Py_ssize_t length, wchar_t **items) - .. c:member:: int inspect + Set the list of wide strings *list* to *length* and *items*. - Enter interactive mode after executing a script or a command. + :ref:`Preinitialize Python ` if needed. - If greater than ``0``, enable inspect: when a script is passed as first - argument or the -c option is used, enter interactive mode after executing - the script or the command, even when :data:`sys.stdin` does not appear to - be a terminal. + .. c:function:: PyStatus PyConfig_Read(PyConfig *config) - Incremented by the :option:`-i` command line option. Set to ``1`` if the - :envvar:`PYTHONINSPECT` environment variable is non-empty. + Read all Python configuration. - Default: ``0``. + Fields which are already initialized are left unchanged. - .. c:member:: int install_signal_handlers + Fields for :ref:`path configuration ` are no longer + calculated or modified when calling this function, as of Python 3.11. - Install Python signal handlers? + The :c:func:`PyConfig_Read` function only parses + :c:member:`PyConfig.argv` arguments once: :c:member:`PyConfig.parse_argv` + is set to ``2`` after arguments are parsed. Since Python arguments are + stripped from :c:member:`PyConfig.argv`, parsing arguments twice would + parse the application options as Python options. - Default: ``1`` in Python mode, ``0`` in isolated mode. + :ref:`Preinitialize Python ` if needed. - .. c:member:: int interactive + .. versionchanged:: 3.10 + The :c:member:`PyConfig.argv` arguments are now only parsed once, + :c:member:`PyConfig.parse_argv` is set to ``2`` after arguments are + parsed, and arguments are only parsed if + :c:member:`PyConfig.parse_argv` equals ``1``. - If greater than ``0``, enable the interactive mode (REPL). + .. versionchanged:: 3.11 + :c:func:`PyConfig_Read` no longer calculates all paths, and so fields + listed under :ref:`Python Path Configuration ` may + no longer be updated until :c:func:`Py_InitializeFromConfig` is + called. - Incremented by the :option:`-i` command line option. + .. c:function:: void PyConfig_Clear(PyConfig *config) - Default: ``0``. + Release configuration memory. - .. c:member:: int int_max_str_digits + Most ``PyConfig`` methods :ref:`preinitialize Python ` if needed. + In that case, the Python preinitialization configuration + (:c:type:`PyPreConfig`) is based on the :c:type:`PyConfig`. If configuration + fields which are in common with :c:type:`PyPreConfig` are tuned, they must + be set before calling a :c:type:`PyConfig` method: - Configures the :ref:`integer string conversion length limitation - `. An initial value of ``-1`` means the value will - be taken from the command line or environment or otherwise default to - 4300 (:data:`sys.int_info.default_max_str_digits`). A value of ``0`` - disables the limitation. Values greater than zero but less than 640 - (:data:`sys.int_info.str_digits_check_threshold`) are unsupported and - will produce an error. + * :c:member:`PyConfig.dev_mode` + * :c:member:`PyConfig.isolated` + * :c:member:`PyConfig.parse_argv` + * :c:member:`PyConfig.use_environment` - Configured by the :option:`-X int_max_str_digits <-X>` command line - flag or the :envvar:`PYTHONINTMAXSTRDIGITS` environment variable.
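+   For instance (a sketch; the values and the use of ``argc``/``argv`` from
+   :c:func:`main` are illustrative), these shared fields can be set right
+   after initializing the structure, before calling any other method::
+
+      PyConfig config;
+      PyConfig_InitPythonConfig(&config);
+
+      /* Fields shared with PyPreConfig: set them first ... */
+      config.dev_mode = 1;
+      config.use_environment = 0;
+
+      /* ... and only then call PyConfig methods, which may
+         preinitialize Python. */
+      PyStatus status = PyConfig_SetBytesArgv(&config, argc, argv);
+      if (PyStatus_Exception(status)) {
+          PyConfig_Clear(&config);
+          Py_ExitStatusException(status);
+      }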
+ Moreover, if :c:func:`PyConfig_SetArgv` or :c:func:`PyConfig_SetBytesArgv` + is used, this method must be called before other methods, since the + preinitialization configuration depends on command line arguments (if + :c:member:`~PyConfig.parse_argv` is non-zero). - Default: ``-1`` in Python mode. 4300 - (:data:`sys.int_info.default_max_str_digits`) in isolated mode. + The caller of these methods is responsible to handle exceptions (error or + exit) using ``PyStatus_Exception()`` and ``Py_ExitStatusException()``. - .. versionadded:: 3.12 + .. c:namespace:: PyConfig - .. c:member:: int cpu_count + Structure fields: - If the value of :c:member:`~PyConfig.cpu_count` is not ``-1`` then it will - override the return values of :func:`os.cpu_count`, - :func:`os.process_cpu_count`, and :func:`multiprocessing.cpu_count`. + .. c:member:: PyWideStringList argv - Configured by the :samp:`-X cpu_count={n|default}` command line - flag or the :envvar:`PYTHON_CPU_COUNT` environment variable. + .. index:: + single: main() + single: argv (in module sys) - Default: ``-1``. + Set :data:`sys.argv` command line arguments based on + :c:member:`~PyConfig.argv`. These parameters are similar to those passed + to the program's :c:func:`main` function with the difference that the + first entry should refer to the script file to be executed rather than + the executable hosting the Python interpreter. If there isn't a script + that will be run, the first entry in :c:member:`~PyConfig.argv` can be an + empty string. - .. versionadded:: 3.13 + Set :c:member:`~PyConfig.parse_argv` to ``1`` to parse + :c:member:`~PyConfig.argv` the same way the regular Python parses Python + command line arguments and then to strip Python arguments from + :c:member:`~PyConfig.argv`. - .. c:member:: int isolated + If :c:member:`~PyConfig.argv` is empty, an empty string is added to + ensure that :data:`sys.argv` always exists and is never empty. - If greater than ``0``, enable isolated mode: + Default: ``NULL``. - * Set :c:member:`~PyConfig.safe_path` to ``1``: - don't prepend a potentially unsafe path to :data:`sys.path` at Python - startup, such as the current directory, the script's directory or an - empty string. - * Set :c:member:`~PyConfig.use_environment` to ``0``: ignore ``PYTHON`` - environment variables. - * Set :c:member:`~PyConfig.user_site_directory` to ``0``: don't add the user - site directory to :data:`sys.path`. - * Python REPL doesn't import :mod:`readline` nor enable default readline - configuration on interactive prompts. + See also the :c:member:`~PyConfig.orig_argv` member. - Set to ``1`` by the :option:`-I` command line option. + .. c:member:: int safe_path - Default: ``0`` in Python mode, ``1`` in isolated mode. + If equals to zero, ``Py_RunMain()`` prepends a potentially unsafe path to + :data:`sys.path` at startup: - See also the :ref:`Isolated Configuration ` and - :c:member:`PyPreConfig.isolated`. + * If :c:member:`argv[0] ` is equal to ``L"-m"`` + (``python -m module``), prepend the current working directory. + * If running a script (``python script.py``), prepend the script's + directory. If it's a symbolic link, resolve symbolic links. + * Otherwise (``python -c code`` and ``python``), prepend an empty string, + which means the current working directory. - .. c:member:: int legacy_windows_stdio + Set to ``1`` by the :option:`-P` command line option and the + :envvar:`PYTHONSAFEPATH` environment variable. 
- If non-zero, use :class:`io.FileIO` instead of - :class:`!io._WindowsConsoleIO` for :data:`sys.stdin`, :data:`sys.stdout` - and :data:`sys.stderr`. + Default: ``0`` in Python config, ``1`` in isolated config. - Set to ``1`` if the :envvar:`PYTHONLEGACYWINDOWSSTDIO` environment - variable is set to a non-empty string. + .. versionadded:: 3.11 - Only available on Windows. ``#ifdef MS_WINDOWS`` macro can be used for - Windows specific code. + .. c:member:: wchar_t* base_exec_prefix - Default: ``0``. + :data:`sys.base_exec_prefix`. - See also the :pep:`528` (Change Windows console encoding to UTF-8). + Default: ``NULL``. - .. c:member:: int malloc_stats + Part of the :ref:`Python Path Configuration ` output. - If non-zero, dump statistics on :ref:`Python pymalloc memory allocator - ` at exit. + See also :c:member:`PyConfig.exec_prefix`. - Set to ``1`` by the :envvar:`PYTHONMALLOCSTATS` environment variable. + .. c:member:: wchar_t* base_executable - The option is ignored if Python is :option:`configured using - the --without-pymalloc option <--without-pymalloc>`. + Python base executable: :data:`sys._base_executable`. - Default: ``0``. + Set by the :envvar:`__PYVENV_LAUNCHER__` environment variable. - .. c:member:: wchar_t* platlibdir + Set from :c:member:`PyConfig.executable` if ``NULL``. - Platform library directory name: :data:`sys.platlibdir`. + Default: ``NULL``. - Set by the :envvar:`PYTHONPLATLIBDIR` environment variable. + Part of the :ref:`Python Path Configuration ` output. - Default: value of the ``PLATLIBDIR`` macro which is set by the - :option:`configure --with-platlibdir option <--with-platlibdir>` - (default: ``"lib"``, or ``"DLLs"`` on Windows). + See also :c:member:`PyConfig.executable`. - Part of the :ref:`Python Path Configuration ` input. + .. c:member:: wchar_t* base_prefix - .. versionadded:: 3.9 + :data:`sys.base_prefix`. - .. versionchanged:: 3.11 - This macro is now used on Windows to locate the standard - library extension modules, typically under ``DLLs``. However, - for compatibility, note that this value is ignored for any - non-standard layouts, including in-tree builds and virtual - environments. + Default: ``NULL``. - .. c:member:: wchar_t* pythonpath_env + Part of the :ref:`Python Path Configuration ` output. - Module search paths (:data:`sys.path`) as a string separated by ``DELIM`` - (:data:`os.pathsep`). + See also :c:member:`PyConfig.prefix`. - Set by the :envvar:`PYTHONPATH` environment variable. + .. c:member:: int buffered_stdio - Default: ``NULL``. + If equals to ``0`` and :c:member:`~PyConfig.configure_c_stdio` is non-zero, + disable buffering on the C streams stdout and stderr. - Part of the :ref:`Python Path Configuration ` input. + Set to ``0`` by the :option:`-u` command line option and the + :envvar:`PYTHONUNBUFFERED` environment variable. - .. c:member:: PyWideStringList module_search_paths - .. c:member:: int module_search_paths_set + stdin is always opened in buffered mode. - Module search paths: :data:`sys.path`. + Default: ``1``. - If :c:member:`~PyConfig.module_search_paths_set` is equal to ``0``, - :c:func:`Py_InitializeFromConfig` will replace - :c:member:`~PyConfig.module_search_paths` and sets - :c:member:`~PyConfig.module_search_paths_set` to ``1``. + .. c:member:: int bytes_warning - Default: empty list (``module_search_paths``) and ``0`` - (``module_search_paths_set``). + If equals to ``1``, issue a warning when comparing :class:`bytes` or + :class:`bytearray` with :class:`str`, or comparing :class:`bytes` with + :class:`int`. 
- Part of the :ref:`Python Path Configuration ` output. + If equal or greater to ``2``, raise a :exc:`BytesWarning` exception in these + cases. - .. c:member:: int optimization_level + Incremented by the :option:`-b` command line option. - Compilation optimization level: + Default: ``0``. - * ``0``: Peephole optimizer, set ``__debug__`` to ``True``. - * ``1``: Level 0, remove assertions, set ``__debug__`` to ``False``. - * ``2``: Level 1, strip docstrings. + .. c:member:: int warn_default_encoding - Incremented by the :option:`-O` command line option. Set to the - :envvar:`PYTHONOPTIMIZE` environment variable value. + If non-zero, emit a :exc:`EncodingWarning` warning when :class:`io.TextIOWrapper` + uses its default encoding. See :ref:`io-encoding-warning` for details. Default: ``0``. - .. c:member:: PyWideStringList orig_argv + .. versionadded:: 3.10 - The list of the original command line arguments passed to the Python - executable: :data:`sys.orig_argv`. + .. c:member:: int code_debug_ranges - If :c:member:`~PyConfig.orig_argv` list is empty and - :c:member:`~PyConfig.argv` is not a list only containing an empty - string, :c:func:`PyConfig_Read` copies :c:member:`~PyConfig.argv` into - :c:member:`~PyConfig.orig_argv` before modifying - :c:member:`~PyConfig.argv` (if :c:member:`~PyConfig.parse_argv` is - non-zero). + If equals to ``0``, disables the inclusion of the end line and column + mappings in code objects. Also disables traceback printing carets to + specific error locations. - See also the :c:member:`~PyConfig.argv` member and the - :c:func:`Py_GetArgcArgv` function. + Set to ``0`` by the :envvar:`PYTHONNODEBUGRANGES` environment variable + and by the :option:`-X no_debug_ranges <-X>` command line option. - Default: empty list. + Default: ``1``. - .. versionadded:: 3.10 + .. versionadded:: 3.11 - .. c:member:: int parse_argv + .. c:member:: wchar_t* check_hash_pycs_mode - Parse command line arguments? + Control the validation behavior of hash-based ``.pyc`` files: + value of the :option:`--check-hash-based-pycs` command line option. - If equals to ``1``, parse :c:member:`~PyConfig.argv` the same way the regular - Python parses :ref:`command line arguments `, and strip - Python arguments from :c:member:`~PyConfig.argv`. + Valid values: - The :c:func:`PyConfig_Read` function only parses - :c:member:`PyConfig.argv` arguments once: :c:member:`PyConfig.parse_argv` - is set to ``2`` after arguments are parsed. Since Python arguments are - stripped from :c:member:`PyConfig.argv`, parsing arguments twice would - parse the application options as Python options. + - ``L"always"``: Hash the source file for invalidation regardless of + value of the 'check_source' flag. + - ``L"never"``: Assume that hash-based pycs always are valid. + - ``L"default"``: The 'check_source' flag in hash-based pycs + determines invalidation. - Default: ``1`` in Python mode, ``0`` in isolated mode. + Default: ``L"default"``. - .. versionchanged:: 3.10 - The :c:member:`PyConfig.argv` arguments are now only parsed if - :c:member:`PyConfig.parse_argv` equals to ``1``. + See also :pep:`552` "Deterministic pycs". - .. c:member:: int parser_debug + .. c:member:: int configure_c_stdio - Parser debug mode. If greater than ``0``, turn on parser debugging output (for expert only, depending - on compilation options). + If non-zero, configure C standard streams: - Incremented by the :option:`-d` command line option. Set to the - :envvar:`PYTHONDEBUG` environment variable value. 
+ * On Windows, set the binary mode (``O_BINARY``) on stdin, stdout and + stderr. + * If :c:member:`~PyConfig.buffered_stdio` equals zero, disable buffering + of stdin, stdout and stderr streams. + * If :c:member:`~PyConfig.interactive` is non-zero, enable stream + buffering on stdin and stdout (only stdout on Windows). - Needs a :ref:`debug build of Python ` (the ``Py_DEBUG`` macro - must be defined). + Default: ``1`` in Python config, ``0`` in isolated config. - Default: ``0``. - - .. c:member:: int pathconfig_warnings + .. c:member:: int dev_mode - If non-zero, calculation of path configuration is allowed to log - warnings into ``stderr``. If equals to ``0``, suppress these warnings. + If non-zero, enable the :ref:`Python Development Mode `. - Default: ``1`` in Python mode, ``0`` in isolated mode. + Set to ``1`` by the :option:`-X dev <-X>` option and the + :envvar:`PYTHONDEVMODE` environment variable. - Part of the :ref:`Python Path Configuration ` input. + Default: ``-1`` in Python mode, ``0`` in isolated mode. - .. versionchanged:: 3.11 - Now also applies on Windows. + .. c:member:: int dump_refs - .. c:member:: wchar_t* prefix + Dump Python references? - The site-specific directory prefix where the platform independent Python - files are installed: :data:`sys.prefix`. + If non-zero, dump all objects which are still alive at exit. - Default: ``NULL``. + Set to ``1`` by the :envvar:`PYTHONDUMPREFS` environment variable. - Part of the :ref:`Python Path Configuration ` output. + Needs a special build of Python with the ``Py_TRACE_REFS`` macro defined: + see the :option:`configure --with-trace-refs option <--with-trace-refs>`. - See also :c:member:`PyConfig.base_prefix`. + Default: ``0``. - .. c:member:: wchar_t* program_name + .. c:member:: wchar_t* dump_refs_file - Program name used to initialize :c:member:`~PyConfig.executable` and in - early error messages during Python initialization. + Filename where to dump Python references. - * On macOS, use :envvar:`PYTHONEXECUTABLE` environment variable if set. - * If the ``WITH_NEXT_FRAMEWORK`` macro is defined, use - :envvar:`__PYVENV_LAUNCHER__` environment variable if set. - * Use ``argv[0]`` of :c:member:`~PyConfig.argv` if available and - non-empty. - * Otherwise, use ``L"python"`` on Windows, or ``L"python3"`` on other - platforms. + Set by the :envvar:`PYTHONDUMPREFSFILE` environment variable. Default: ``NULL``. - Part of the :ref:`Python Path Configuration ` input. - - .. c:member:: wchar_t* pycache_prefix - - Directory where cached ``.pyc`` files are written: - :data:`sys.pycache_prefix`. + .. versionadded:: 3.11 - Set by the :option:`-X pycache_prefix=PATH <-X>` command line option and - the :envvar:`PYTHONPYCACHEPREFIX` environment variable. - The command-line option takes precedence. + .. c:member:: wchar_t* exec_prefix - If ``NULL``, :data:`sys.pycache_prefix` is set to ``None``. + The site-specific directory prefix where the platform-dependent Python + files are installed: :data:`sys.exec_prefix`. Default: ``NULL``. - .. c:member:: int quiet - - Quiet mode. If greater than ``0``, don't display the copyright and version at - Python startup in interactive mode. - - Incremented by the :option:`-q` command line option. - - Default: ``0``. + Part of the :ref:`Python Path Configuration ` output. - .. c:member:: wchar_t* run_command + See also :c:member:`PyConfig.base_exec_prefix`. - Value of the :option:`-c` command line option. + .. c:member:: wchar_t* executable - Used by :c:func:`Py_RunMain`. 
+ The absolute path of the executable binary for the Python interpreter: + :data:`sys.executable`. Default: ``NULL``. - .. c:member:: wchar_t* run_filename - - Filename passed on the command line: trailing command line argument - without :option:`-c` or :option:`-m`. It is used by the - :c:func:`Py_RunMain` function. + Part of the :ref:`Python Path Configuration ` output. - For example, it is set to ``script.py`` by the ``python3 script.py arg`` - command line. + See also :c:member:`PyConfig.base_executable`. - See also the :c:member:`PyConfig.skip_source_first_line` option. + .. c:member:: int faulthandler - Default: ``NULL``. + Enable faulthandler? - .. c:member:: wchar_t* run_module + If non-zero, call :func:`faulthandler.enable` at startup. - Value of the :option:`-m` command line option. + Set to ``1`` by :option:`-X faulthandler <-X>` and the + :envvar:`PYTHONFAULTHANDLER` environment variable. - Used by :c:func:`Py_RunMain`. + Default: ``-1`` in Python mode, ``0`` in isolated mode. - Default: ``NULL``. + .. c:member:: wchar_t* filesystem_encoding - .. c:member:: wchar_t* run_presite + :term:`Filesystem encoding `: + :func:`sys.getfilesystemencoding`. - ``package.module`` path to module that should be imported before - ``site.py`` is run. + On macOS, Android and VxWorks: use ``"utf-8"`` by default. - Set by the :option:`-X presite=package.module <-X>` command-line - option and the :envvar:`PYTHON_PRESITE` environment variable. - The command-line option takes precedence. + On Windows: use ``"utf-8"`` by default, or ``"mbcs"`` if + :c:member:`~PyPreConfig.legacy_windows_fs_encoding` of + :c:type:`PyPreConfig` is non-zero. - Needs a :ref:`debug build of Python ` (the ``Py_DEBUG`` macro - must be defined). + Default encoding on other platforms: - Default: ``NULL``. + * ``"utf-8"`` if :c:member:`PyPreConfig.utf8_mode` is non-zero. + * ``"ascii"`` if Python detects that ``nl_langinfo(CODESET)`` announces + the ASCII encoding, whereas the ``mbstowcs()`` function + decodes from a different encoding (usually Latin1). + * ``"utf-8"`` if ``nl_langinfo(CODESET)`` returns an empty string. + * Otherwise, use the :term:`locale encoding`: + ``nl_langinfo(CODESET)`` result. - .. c:member:: int show_ref_count + At Python startup, the encoding name is normalized to the Python codec + name. For example, ``"ANSI_X3.4-1968"`` is replaced with ``"ascii"``. - Show total reference count at exit (excluding :term:`immortal` objects)? + See also the :c:member:`~PyConfig.filesystem_errors` member. - Set to ``1`` by :option:`-X showrefcount <-X>` command line option. + .. c:member:: wchar_t* filesystem_errors - Needs a :ref:`debug build of Python ` (the ``Py_REF_DEBUG`` - macro must be defined). + :term:`Filesystem error handler `: + :func:`sys.getfilesystemencodeerrors`. - Default: ``0``. + On Windows: use ``"surrogatepass"`` by default, or ``"replace"`` if + :c:member:`~PyPreConfig.legacy_windows_fs_encoding` of + :c:type:`PyPreConfig` is non-zero. - .. c:member:: int site_import + On other platforms: use ``"surrogateescape"`` by default. - Import the :mod:`site` module at startup? + Supported error handlers: - If equal to zero, disable the import of the module site and the - site-dependent manipulations of :data:`sys.path` that it entails. + * ``"strict"`` + * ``"surrogateescape"`` + * ``"surrogatepass"`` (only supported with the UTF-8 encoding) - Also disable these manipulations if the :mod:`site` module is explicitly - imported later (call :func:`site.main` if you want them to be triggered). 
+ See also the :c:member:`~PyConfig.filesystem_encoding` member. - Set to ``0`` by the :option:`-S` command line option. + .. c:member:: int use_frozen_modules - :data:`sys.flags.no_site ` is set to the inverted value of - :c:member:`~PyConfig.site_import`. + If non-zero, use frozen modules. - Default: ``1``. + Set by the :envvar:`PYTHON_FROZEN_MODULES` environment variable. - .. c:member:: int skip_source_first_line + Default: ``1`` in a release build, or ``0`` in a :ref:`debug build + `. - If non-zero, skip the first line of the :c:member:`PyConfig.run_filename` - source. + .. c:member:: unsigned long hash_seed + .. c:member:: int use_hash_seed - It allows the usage of non-Unix forms of ``#!cmd``. This is intended for - a DOS specific hack only. + Randomized hash function seed. - Set to ``1`` by the :option:`-x` command line option. + If :c:member:`~PyConfig.use_hash_seed` is zero, a seed is chosen randomly + at Python startup, and :c:member:`~PyConfig.hash_seed` is ignored. - Default: ``0``. + Set by the :envvar:`PYTHONHASHSEED` environment variable. - .. c:member:: wchar_t* stdio_encoding - .. c:member:: wchar_t* stdio_errors + Default *use_hash_seed* value: ``-1`` in Python mode, ``0`` in isolated + mode. - Encoding and encoding errors of :data:`sys.stdin`, :data:`sys.stdout` and - :data:`sys.stderr` (but :data:`sys.stderr` always uses - ``"backslashreplace"`` error handler). + .. c:member:: wchar_t* home - Use the :envvar:`PYTHONIOENCODING` environment variable if it is - non-empty. + Set the default Python "home" directory, that is, the location of the + standard Python libraries (see :envvar:`PYTHONHOME`). - Default encoding: + Set by the :envvar:`PYTHONHOME` environment variable. - * ``"UTF-8"`` if :c:member:`PyPreConfig.utf8_mode` is non-zero. - * Otherwise, use the :term:`locale encoding`. + Default: ``NULL``. - Default error handler: + Part of the :ref:`Python Path Configuration ` input. - * On Windows: use ``"surrogateescape"``. - * ``"surrogateescape"`` if :c:member:`PyPreConfig.utf8_mode` is non-zero, - or if the LC_CTYPE locale is "C" or "POSIX". - * ``"strict"`` otherwise. + .. c:member:: int import_time - See also :c:member:`PyConfig.legacy_windows_stdio`. + If non-zero, profile import time. - .. c:member:: int tracemalloc + Set the ``1`` by the :option:`-X importtime <-X>` option and the + :envvar:`PYTHONPROFILEIMPORTTIME` environment variable. - Enable tracemalloc? + Default: ``0``. - If non-zero, call :func:`tracemalloc.start` at startup. + .. c:member:: int inspect - Set by :option:`-X tracemalloc=N <-X>` command line option and by the - :envvar:`PYTHONTRACEMALLOC` environment variable. + Enter interactive mode after executing a script or a command. - Default: ``-1`` in Python mode, ``0`` in isolated mode. + If greater than ``0``, enable inspect: when a script is passed as first + argument or the -c option is used, enter interactive mode after executing + the script or the command, even when :data:`sys.stdin` does not appear to + be a terminal. - .. c:member:: int perf_profiling + Incremented by the :option:`-i` command line option. Set to ``1`` if the + :envvar:`PYTHONINSPECT` environment variable is non-empty. - Enable the Linux ``perf`` profiler support? + Default: ``0``. - If equals to ``1``, enable support for the Linux ``perf`` profiler. + .. c:member:: int install_signal_handlers - If equals to ``2``, enable support for the Linux ``perf`` profiler with - DWARF JIT support. + Install Python signal handlers? 
- Set to ``1`` by :option:`-X perf <-X>` command-line option and the - :envvar:`PYTHONPERFSUPPORT` environment variable. + Default: ``1`` in Python mode, ``0`` in isolated mode. - Set to ``2`` by the :option:`-X perf_jit <-X>` command-line option and - the :envvar:`PYTHON_PERF_JIT_SUPPORT` environment variable. + .. c:member:: int interactive - Default: ``-1``. + If greater than ``0``, enable the interactive mode (REPL). - .. seealso:: - See :ref:`perf_profiling` for more information. + Incremented by the :option:`-i` command line option. - .. versionadded:: 3.12 + Default: ``0``. - .. c:member:: int use_environment + .. c:member:: int int_max_str_digits - Use :ref:`environment variables `? + Configures the :ref:`integer string conversion length limitation + `. An initial value of ``-1`` means the value will + be taken from the command line or environment or otherwise default to + 4300 (:data:`sys.int_info.default_max_str_digits`). A value of ``0`` + disables the limitation. Values greater than zero but less than 640 + (:data:`sys.int_info.str_digits_check_threshold`) are unsupported and + will produce an error. - If equals to zero, ignore the :ref:`environment variables - `. + Configured by the :option:`-X int_max_str_digits <-X>` command line + flag or the :envvar:`PYTHONINTMAXSTRDIGITS` environment variable. - Set to ``0`` by the :option:`-E` environment variable. + Default: ``-1`` in Python mode. 4300 + (:data:`sys.int_info.default_max_str_digits`) in isolated mode. - Default: ``1`` in Python config and ``0`` in isolated config. + .. versionadded:: 3.12 - .. c:member:: int use_system_logger + .. c:member:: int cpu_count - If non-zero, ``stdout`` and ``stderr`` will be redirected to the system - log. + If the value of :c:member:`~PyConfig.cpu_count` is not ``-1`` then it will + override the return values of :func:`os.cpu_count`, + :func:`os.process_cpu_count`, and :func:`multiprocessing.cpu_count`. - Only available on macOS 10.12 and later, and on iOS. + Configured by the :samp:`-X cpu_count={n|default}` command line + flag or the :envvar:`PYTHON_CPU_COUNT` environment variable. - Default: ``0`` (don't use system log). + Default: ``-1``. - .. versionadded:: 3.13.2 + .. versionadded:: 3.13 - .. c:member:: int user_site_directory + .. c:member:: int isolated - If non-zero, add the user site directory to :data:`sys.path`. + If greater than ``0``, enable isolated mode: - Set to ``0`` by the :option:`-s` and :option:`-I` command line options. + * Set :c:member:`~PyConfig.safe_path` to ``1``: + don't prepend a potentially unsafe path to :data:`sys.path` at Python + startup, such as the current directory, the script's directory or an + empty string. + * Set :c:member:`~PyConfig.use_environment` to ``0``: ignore ``PYTHON`` + environment variables. + * Set :c:member:`~PyConfig.user_site_directory` to ``0``: don't add the user + site directory to :data:`sys.path`. + * Python REPL doesn't import :mod:`readline` nor enable default readline + configuration on interactive prompts. - Set to ``0`` by the :envvar:`PYTHONNOUSERSITE` environment variable. + Set to ``1`` by the :option:`-I` command line option. - Default: ``1`` in Python mode, ``0`` in isolated mode. + Default: ``0`` in Python mode, ``1`` in isolated mode. - .. c:member:: int verbose + See also the :ref:`Isolated Configuration ` and + :c:member:`PyPreConfig.isolated`. - Verbose mode. If greater than ``0``, print a message each time a module is - imported, showing the place (filename or built-in module) from which - it is loaded. + .. 
c:member:: int legacy_windows_stdio - If greater than or equal to ``2``, print a message for each file that is - checked for when searching for a module. Also provides information on - module cleanup at exit. + If non-zero, use :class:`io.FileIO` instead of + :class:`!io._WindowsConsoleIO` for :data:`sys.stdin`, :data:`sys.stdout` + and :data:`sys.stderr`. - Incremented by the :option:`-v` command line option. + Set to ``1`` if the :envvar:`PYTHONLEGACYWINDOWSSTDIO` environment + variable is set to a non-empty string. - Set by the :envvar:`PYTHONVERBOSE` environment variable value. + Only available on Windows. ``#ifdef MS_WINDOWS`` macro can be used for + Windows specific code. Default: ``0``. - .. c:member:: PyWideStringList warnoptions + See also the :pep:`528` (Change Windows console encoding to UTF-8). - Options of the :mod:`warnings` module to build warnings filters, lowest - to highest priority: :data:`sys.warnoptions`. + .. c:member:: int malloc_stats - The :mod:`warnings` module adds :data:`sys.warnoptions` in the reverse - order: the last :c:member:`PyConfig.warnoptions` item becomes the first - item of :data:`warnings.filters` which is checked first (highest - priority). + If non-zero, dump statistics on :ref:`Python pymalloc memory allocator + ` at exit. - The :option:`-W` command line options adds its value to - :c:member:`~PyConfig.warnoptions`, it can be used multiple times. + Set to ``1`` by the :envvar:`PYTHONMALLOCSTATS` environment variable. - The :envvar:`PYTHONWARNINGS` environment variable can also be used to add - warning options. Multiple options can be specified, separated by commas - (``,``). + The option is ignored if Python is :option:`configured using + the --without-pymalloc option <--without-pymalloc>`. - Default: empty list. + Default: ``0``. - .. c:member:: int write_bytecode + .. c:member:: wchar_t* platlibdir - If equal to ``0``, Python won't try to write ``.pyc`` files on the import of - source modules. + Platform library directory name: :data:`sys.platlibdir`. - Set to ``0`` by the :option:`-B` command line option and the - :envvar:`PYTHONDONTWRITEBYTECODE` environment variable. + Set by the :envvar:`PYTHONPLATLIBDIR` environment variable. - :data:`sys.dont_write_bytecode` is initialized to the inverted value of - :c:member:`~PyConfig.write_bytecode`. + Default: value of the ``PLATLIBDIR`` macro which is set by the + :option:`configure --with-platlibdir option <--with-platlibdir>` + (default: ``"lib"``, or ``"DLLs"`` on Windows). - Default: ``1``. + Part of the :ref:`Python Path Configuration ` input. - .. c:member:: PyWideStringList xoptions + .. versionadded:: 3.9 - Values of the :option:`-X` command line options: :data:`sys._xoptions`. + .. versionchanged:: 3.11 + This macro is now used on Windows to locate the standard + library extension modules, typically under ``DLLs``. However, + for compatibility, note that this value is ignored for any + non-standard layouts, including in-tree builds and virtual + environments. - Default: empty list. + .. c:member:: wchar_t* pythonpath_env -If :c:member:`~PyConfig.parse_argv` is non-zero, :c:member:`~PyConfig.argv` -arguments are parsed the same way the regular Python parses :ref:`command line -arguments `, and Python arguments are stripped from -:c:member:`~PyConfig.argv`. + Module search paths (:data:`sys.path`) as a string separated by ``DELIM`` + (:data:`os.pathsep`). -The :c:member:`~PyConfig.xoptions` options are parsed to set other options: see -the :option:`-X` command line option. 
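As a sketch of how the ``int_max_str_digits`` and ``cpu_count`` fields documented above might be combined by an embedder; the values are arbitrary examples:

.. code-block:: c

   /* Fragment: assumes `config` was initialized with PyConfig_InitPythonConfig(). */
   config.int_max_str_digits = 0;   /* disable the int<->str conversion length limit */
   config.cpu_count = 4;            /* report 4 CPUs to os.cpu_count() and friends */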
+ Set by the :envvar:`PYTHONPATH` environment variable. -.. versionchanged:: 3.9 + Default: ``NULL``. - The ``show_alloc_count`` field has been removed. + Part of the :ref:`Python Path Configuration ` input. + .. c:member:: PyWideStringList module_search_paths + .. c:member:: int module_search_paths_set -.. _init-from-config: + Module search paths: :data:`sys.path`. -Initialization with PyConfig ----------------------------- + If :c:member:`~PyConfig.module_search_paths_set` is equal to ``0``, + :c:func:`Py_InitializeFromConfig` will replace + :c:member:`~PyConfig.module_search_paths` and sets + :c:member:`~PyConfig.module_search_paths_set` to ``1``. -Initializing the interpreter from a populated configuration struct is handled -by calling :c:func:`Py_InitializeFromConfig`. + Default: empty list (``module_search_paths``) and ``0`` + (``module_search_paths_set``). -The caller is responsible to handle exceptions (error or exit) using -:c:func:`PyStatus_Exception` and :c:func:`Py_ExitStatusException`. + Part of the :ref:`Python Path Configuration ` output. -If :c:func:`PyImport_FrozenModules`, :c:func:`PyImport_AppendInittab` or -:c:func:`PyImport_ExtendInittab` are used, they must be set or called after -Python preinitialization and before the Python initialization. If Python is -initialized multiple times, :c:func:`PyImport_AppendInittab` or -:c:func:`PyImport_ExtendInittab` must be called before each Python -initialization. + .. c:member:: int optimization_level -The current configuration (``PyConfig`` type) is stored in -``PyInterpreterState.config``. + Compilation optimization level: -Example setting the program name:: + * ``0``: Peephole optimizer, set ``__debug__`` to ``True``. + * ``1``: Level 0, remove assertions, set ``__debug__`` to ``False``. + * ``2``: Level 1, strip docstrings. - void init_python(void) - { - PyStatus status; + Incremented by the :option:`-O` command line option. Set to the + :envvar:`PYTHONOPTIMIZE` environment variable value. - PyConfig config; - PyConfig_InitPythonConfig(&config); + Default: ``0``. - /* Set the program name. Implicitly preinitialize Python. */ - status = PyConfig_SetString(&config, &config.program_name, - L"/path/to/my_program"); - if (PyStatus_Exception(status)) { - goto exception; - } + .. c:member:: PyWideStringList orig_argv - status = Py_InitializeFromConfig(&config); - if (PyStatus_Exception(status)) { - goto exception; - } - PyConfig_Clear(&config); - return; + The list of the original command line arguments passed to the Python + executable: :data:`sys.orig_argv`. - exception: - PyConfig_Clear(&config); - Py_ExitStatusException(status); - } + If :c:member:`~PyConfig.orig_argv` list is empty and + :c:member:`~PyConfig.argv` is not a list only containing an empty + string, :c:func:`PyConfig_Read` copies :c:member:`~PyConfig.argv` into + :c:member:`~PyConfig.orig_argv` before modifying + :c:member:`~PyConfig.argv` (if :c:member:`~PyConfig.parse_argv` is + non-zero). -More complete example modifying the default configuration, read the -configuration, and then override some parameters. Note that since -3.11, many parameters are not calculated until initialization, and -so values cannot be read from the configuration structure. Any values -set before initialize is called will be left unchanged by -initialization:: + See also the :c:member:`~PyConfig.argv` member and the + :c:func:`Py_GetArgcArgv` function. - PyStatus init_python(const char *program_name) - { - PyStatus status; + Default: empty list. 
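A minimal sketch of setting ``optimization_level`` before initialization and checking the result from Python; the helper name is an assumption:

.. code-block:: c

   #include <Python.h>

   static int
   init_optimized(void)
   {
       PyConfig config;
       PyConfig_InitPythonConfig(&config);
       config.optimization_level = 2;   /* like -OO: assertions and docstrings removed */

       PyStatus status = Py_InitializeFromConfig(&config);
       PyConfig_Clear(&config);
       if (PyStatus_Exception(status)) {
           return -1;
       }
       /* prints 2 */
       return PyRun_SimpleString("import sys; print(sys.flags.optimize)");
   }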
- PyConfig config; - PyConfig_InitPythonConfig(&config); + .. versionadded:: 3.10 - /* Set the program name before reading the configuration - (decode byte string from the locale encoding). + .. c:member:: int parse_argv - Implicitly preinitialize Python. */ - status = PyConfig_SetBytesString(&config, &config.program_name, - program_name); - if (PyStatus_Exception(status)) { - goto done; - } + Parse command line arguments? - /* Read all configuration at once */ - status = PyConfig_Read(&config); - if (PyStatus_Exception(status)) { - goto done; - } + If equals to ``1``, parse :c:member:`~PyConfig.argv` the same way the regular + Python parses :ref:`command line arguments `, and strip + Python arguments from :c:member:`~PyConfig.argv`. - /* Specify sys.path explicitly */ - /* If you want to modify the default set of paths, finish - initialization first and then use PySys_GetObject("path") */ - config.module_search_paths_set = 1; - status = PyWideStringList_Append(&config.module_search_paths, - L"/path/to/stdlib"); - if (PyStatus_Exception(status)) { - goto done; - } - status = PyWideStringList_Append(&config.module_search_paths, - L"/path/to/more/modules"); - if (PyStatus_Exception(status)) { - goto done; - } + The :c:func:`PyConfig_Read` function only parses + :c:member:`PyConfig.argv` arguments once: :c:member:`PyConfig.parse_argv` + is set to ``2`` after arguments are parsed. Since Python arguments are + stripped from :c:member:`PyConfig.argv`, parsing arguments twice would + parse the application options as Python options. - /* Override executable computed by PyConfig_Read() */ - status = PyConfig_SetString(&config, &config.executable, - L"/path/to/my_executable"); - if (PyStatus_Exception(status)) { - goto done; - } + Default: ``1`` in Python mode, ``0`` in isolated mode. - status = Py_InitializeFromConfig(&config); + .. versionchanged:: 3.10 + The :c:member:`PyConfig.argv` arguments are now only parsed if + :c:member:`PyConfig.parse_argv` equals to ``1``. - done: - PyConfig_Clear(&config); - return status; - } + .. c:member:: int parser_debug + Parser debug mode. If greater than ``0``, turn on parser debugging output (for expert only, depending + on compilation options). -.. _init-isolated-conf: + Incremented by the :option:`-d` command line option. Set to the + :envvar:`PYTHONDEBUG` environment variable value. -Isolated Configuration ----------------------- + Needs a :ref:`debug build of Python ` (the ``Py_DEBUG`` macro + must be defined). -:c:func:`PyPreConfig_InitIsolatedConfig` and -:c:func:`PyConfig_InitIsolatedConfig` functions create a configuration to -isolate Python from the system. For example, to embed Python into an -application. + Default: ``0``. -This configuration ignores global configuration variables, environment -variables, command line arguments (:c:member:`PyConfig.argv` is not parsed) -and user site directory. The C standard streams (ex: ``stdout``) and the -LC_CTYPE locale are left unchanged. Signal handlers are not installed. + .. c:member:: int pathconfig_warnings -Configuration files are still used with this configuration to determine -paths that are unspecified. Ensure :c:member:`PyConfig.home` is specified -to avoid computing the default path configuration. + If non-zero, calculation of path configuration is allowed to log + warnings into ``stderr``. If equals to ``0``, suppress these warnings. + Default: ``1`` in Python mode, ``0`` in isolated mode. -.. _init-python-config: + Part of the :ref:`Python Path Configuration ` input. 
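A hedged sketch of handing the process arguments to Python with ``parse_argv`` enabled, so that Python options are stripped from :data:`sys.argv` before the application sees it; the function name is an assumption:

.. code-block:: c

   #include <Python.h>

   static PyStatus
   init_with_args(int argc, char **argv)
   {
       PyConfig config;
       PyConfig_InitPythonConfig(&config);

       /* Parse argv the same way the regular Python executable does. */
       config.parse_argv = 1;

       PyStatus status = PyConfig_SetBytesArgv(&config, argc, argv);
       if (!PyStatus_Exception(status)) {
           status = Py_InitializeFromConfig(&config);
       }
       PyConfig_Clear(&config);
       return status;
   }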
-Python Configuration --------------------- + .. versionchanged:: 3.11 + Now also applies on Windows. -:c:func:`PyPreConfig_InitPythonConfig` and :c:func:`PyConfig_InitPythonConfig` -functions create a configuration to build a customized Python which behaves as -the regular Python. + .. c:member:: wchar_t* prefix -Environments variables and command line arguments are used to configure -Python, whereas global configuration variables are ignored. + The site-specific directory prefix where the platform independent Python + files are installed: :data:`sys.prefix`. -This function enables C locale coercion (:pep:`538`) -and :ref:`Python UTF-8 Mode ` -(:pep:`540`) depending on the LC_CTYPE locale, :envvar:`PYTHONUTF8` and -:envvar:`PYTHONCOERCECLOCALE` environment variables. + Default: ``NULL``. + Part of the :ref:`Python Path Configuration ` output. -.. _init-path-config: + See also :c:member:`PyConfig.base_prefix`. -Python Path Configuration -------------------------- + .. c:member:: wchar_t* program_name -:c:type:`PyConfig` contains multiple fields for the path configuration: + Program name used to initialize :c:member:`~PyConfig.executable` and in + early error messages during Python initialization. -* Path configuration inputs: + * On macOS, use :envvar:`PYTHONEXECUTABLE` environment variable if set. + * If the ``WITH_NEXT_FRAMEWORK`` macro is defined, use + :envvar:`__PYVENV_LAUNCHER__` environment variable if set. + * Use ``argv[0]`` of :c:member:`~PyConfig.argv` if available and + non-empty. + * Otherwise, use ``L"python"`` on Windows, or ``L"python3"`` on other + platforms. - * :c:member:`PyConfig.home` - * :c:member:`PyConfig.platlibdir` - * :c:member:`PyConfig.pathconfig_warnings` - * :c:member:`PyConfig.program_name` - * :c:member:`PyConfig.pythonpath_env` - * current working directory: to get absolute paths - * ``PATH`` environment variable to get the program full path - (from :c:member:`PyConfig.program_name`) - * ``__PYVENV_LAUNCHER__`` environment variable - * (Windows only) Application paths in the registry under - "Software\Python\PythonCore\X.Y\PythonPath" of HKEY_CURRENT_USER and - HKEY_LOCAL_MACHINE (where X.Y is the Python version). + Default: ``NULL``. -* Path configuration output fields: + Part of the :ref:`Python Path Configuration ` input. - * :c:member:`PyConfig.base_exec_prefix` - * :c:member:`PyConfig.base_executable` - * :c:member:`PyConfig.base_prefix` - * :c:member:`PyConfig.exec_prefix` - * :c:member:`PyConfig.executable` - * :c:member:`PyConfig.module_search_paths_set`, - :c:member:`PyConfig.module_search_paths` - * :c:member:`PyConfig.prefix` + .. c:member:: wchar_t* pycache_prefix -If at least one "output field" is not set, Python calculates the path -configuration to fill unset fields. If -:c:member:`~PyConfig.module_search_paths_set` is equal to ``0``, -:c:member:`~PyConfig.module_search_paths` is overridden and -:c:member:`~PyConfig.module_search_paths_set` is set to ``1``. + Directory where cached ``.pyc`` files are written: + :data:`sys.pycache_prefix`. -It is possible to completely ignore the function calculating the default -path configuration by setting explicitly all path configuration output -fields listed above. A string is considered as set even if it is non-empty. -``module_search_paths`` is considered as set if -``module_search_paths_set`` is set to ``1``. In this case, -``module_search_paths`` will be used without modification. 
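A hedged sketch of the "set every output field" approach described above; every path below is a placeholder and the status checks are omitted for brevity:

.. code-block:: c

   /* Fragment: assumes `config` was initialized with PyConfig_InitPythonConfig(). */
   config.module_search_paths_set = 1;
   PyWideStringList_Append(&config.module_search_paths, L"/opt/app/python-stdlib");

   PyConfig_SetString(&config, &config.executable, L"/opt/app/bin/embedded-python");
   PyConfig_SetString(&config, &config.base_executable, L"/opt/app/bin/embedded-python");
   PyConfig_SetString(&config, &config.prefix, L"/opt/app");
   PyConfig_SetString(&config, &config.base_prefix, L"/opt/app");
   PyConfig_SetString(&config, &config.exec_prefix, L"/opt/app");
   PyConfig_SetString(&config, &config.base_exec_prefix, L"/opt/app");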
+ Set by the :option:`-X pycache_prefix=PATH <-X>` command line option and + the :envvar:`PYTHONPYCACHEPREFIX` environment variable. + The command-line option takes precedence. -Set :c:member:`~PyConfig.pathconfig_warnings` to ``0`` to suppress warnings when -calculating the path configuration (Unix only, Windows does not log any warning). + If ``NULL``, :data:`sys.pycache_prefix` is set to ``None``. -If :c:member:`~PyConfig.base_prefix` or :c:member:`~PyConfig.base_exec_prefix` -fields are not set, they inherit their value from :c:member:`~PyConfig.prefix` -and :c:member:`~PyConfig.exec_prefix` respectively. + Default: ``NULL``. -:c:func:`Py_RunMain` and :c:func:`Py_Main` modify :data:`sys.path`: + .. c:member:: int quiet -* If :c:member:`~PyConfig.run_filename` is set and is a directory which contains a - ``__main__.py`` script, prepend :c:member:`~PyConfig.run_filename` to - :data:`sys.path`. -* If :c:member:`~PyConfig.isolated` is zero: + Quiet mode. If greater than ``0``, don't display the copyright and version at + Python startup in interactive mode. - * If :c:member:`~PyConfig.run_module` is set, prepend the current directory - to :data:`sys.path`. Do nothing if the current directory cannot be read. - * If :c:member:`~PyConfig.run_filename` is set, prepend the directory of the - filename to :data:`sys.path`. - * Otherwise, prepend an empty string to :data:`sys.path`. + Incremented by the :option:`-q` command line option. -If :c:member:`~PyConfig.site_import` is non-zero, :data:`sys.path` can be -modified by the :mod:`site` module. If -:c:member:`~PyConfig.user_site_directory` is non-zero and the user's -site-package directory exists, the :mod:`site` module appends the user's -site-package directory to :data:`sys.path`. + Default: ``0``. -The following configuration files are used by the path configuration: + .. c:member:: wchar_t* run_command -* ``pyvenv.cfg`` -* ``._pth`` file (ex: ``python._pth``) -* ``pybuilddir.txt`` (Unix only) + Value of the :option:`-c` command line option. -If a ``._pth`` file is present: + Used by :c:func:`Py_RunMain`. -* Set :c:member:`~PyConfig.isolated` to ``1``. -* Set :c:member:`~PyConfig.use_environment` to ``0``. -* Set :c:member:`~PyConfig.site_import` to ``0``. -* Set :c:member:`~PyConfig.safe_path` to ``1``. + Default: ``NULL``. -If :c:member:`~PyConfig.home` is not set and a ``pyvenv.cfg`` file is present in -the same directory as :c:member:`~PyConfig.executable`, or its parent, -:c:member:`~PyConfig.prefix` and :c:member:`~PyConfig.exec_prefix` are set that -location. When this happens, :c:member:`~PyConfig.base_prefix` and -:c:member:`~PyConfig.base_exec_prefix` still keep their value, pointing to the -base installation. See :ref:`sys-path-init-virtual-environments` for more -information. + .. c:member:: wchar_t* run_filename -The ``__PYVENV_LAUNCHER__`` environment variable is used to set -:c:member:`PyConfig.base_executable`. + Filename passed on the command line: trailing command line argument + without :option:`-c` or :option:`-m`. It is used by the + :c:func:`Py_RunMain` function. + + For example, it is set to ``script.py`` by the ``python3 script.py arg`` + command line. + + See also the :c:member:`PyConfig.skip_source_first_line` option. + + Default: ``NULL``. + + .. c:member:: wchar_t* run_module + + Value of the :option:`-m` command line option. + + Used by :c:func:`Py_RunMain`. + + Default: ``NULL``. + + .. c:member:: wchar_t* run_presite + + ``package.module`` path to module that should be imported before + ``site.py`` is run. 
+ + Set by the :option:`-X presite=package.module <-X>` command-line + option and the :envvar:`PYTHON_PRESITE` environment variable. + The command-line option takes precedence. + + Needs a :ref:`debug build of Python ` (the ``Py_DEBUG`` macro + must be defined). + + Default: ``NULL``. + + .. c:member:: int show_ref_count + + Show total reference count at exit (excluding :term:`immortal` objects)? + + Set to ``1`` by :option:`-X showrefcount <-X>` command line option. + + Needs a :ref:`debug build of Python ` (the ``Py_REF_DEBUG`` + macro must be defined). -.. versionchanged:: 3.14 + Default: ``0``. - :c:member:`~PyConfig.prefix`, and :c:member:`~PyConfig.exec_prefix`, are now - set to the ``pyvenv.cfg`` directory. This was previously done by :mod:`site`, - therefore affected by :option:`-S`. + .. c:member:: int site_import -.. _pyinitconfig_api: + Import the :mod:`site` module at startup? -PyInitConfig C API -================== + If equal to zero, disable the import of the module site and the + site-dependent manipulations of :data:`sys.path` that it entails. -C API to configure the Python initialization (:pep:`741`). + Also disable these manipulations if the :mod:`site` module is explicitly + imported later (call :func:`site.main` if you want them to be triggered). -.. versionadded:: 3.14 + Set to ``0`` by the :option:`-S` command line option. -Create Config -------------- + :data:`sys.flags.no_site ` is set to the inverted value of + :c:member:`~PyConfig.site_import`. -.. c:struct:: PyInitConfig + Default: ``1``. - Opaque structure to configure the Python initialization. + .. c:member:: int skip_source_first_line + If non-zero, skip the first line of the :c:member:`PyConfig.run_filename` + source. -.. c:function:: PyInitConfig* PyInitConfig_Create(void) + It allows the usage of non-Unix forms of ``#!cmd``. This is intended for + a DOS specific hack only. - Create a new initialization configuration using :ref:`Isolated Configuration - ` default values. + Set to ``1`` by the :option:`-x` command line option. - It must be freed by :c:func:`PyInitConfig_Free`. + Default: ``0``. - Return ``NULL`` on memory allocation failure. + .. c:member:: wchar_t* stdio_encoding + .. c:member:: wchar_t* stdio_errors + Encoding and encoding errors of :data:`sys.stdin`, :data:`sys.stdout` and + :data:`sys.stderr` (but :data:`sys.stderr` always uses + ``"backslashreplace"`` error handler). -.. c:function:: void PyInitConfig_Free(PyInitConfig *config) + Use the :envvar:`PYTHONIOENCODING` environment variable if it is + non-empty. - Free memory of the initialization configuration *config*. + Default encoding: - If *config* is ``NULL``, no operation is performed. + * ``"UTF-8"`` if :c:member:`PyPreConfig.utf8_mode` is non-zero. + * Otherwise, use the :term:`locale encoding`. + Default error handler: -Error Handling --------------- + * On Windows: use ``"surrogateescape"``. + * ``"surrogateescape"`` if :c:member:`PyPreConfig.utf8_mode` is non-zero, + or if the LC_CTYPE locale is "C" or "POSIX". + * ``"strict"`` otherwise. -.. c:function:: int PyInitConfig_GetError(PyInitConfig* config, const char **err_msg) + See also :c:member:`PyConfig.legacy_windows_stdio`. - Get the *config* error message. + .. c:member:: int tracemalloc - * Set *\*err_msg* and return ``1`` if an error is set. - * Set *\*err_msg* to ``NULL`` and return ``0`` otherwise. + Enable tracemalloc? - An error message is an UTF-8 encoded string. + If non-zero, call :func:`tracemalloc.start` at startup. 
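A short sketch of forcing UTF-8 for the standard streams with the ``stdio_encoding`` and ``stdio_errors`` fields described above; the chosen error handler is an arbitrary example:

.. code-block:: c

   /* Fragment: equivalent to PYTHONIOENCODING=utf-8:backslashreplace
      (status checks omitted for brevity). */
   PyConfig_SetString(&config, &config.stdio_encoding, L"utf-8");
   PyConfig_SetString(&config, &config.stdio_errors, L"backslashreplace");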
- If *config* has an exit code, format the exit code as an error - message. + Set by :option:`-X tracemalloc=N <-X>` command line option and by the + :envvar:`PYTHONTRACEMALLOC` environment variable. - The error message remains valid until another ``PyInitConfig`` - function is called with *config*. The caller doesn't have to free the - error message. + Default: ``-1`` in Python mode, ``0`` in isolated mode. + .. c:member:: int perf_profiling -.. c:function:: int PyInitConfig_GetExitCode(PyInitConfig* config, int *exitcode) + Enable the Linux ``perf`` profiler support? - Get the *config* exit code. + If equals to ``1``, enable support for the Linux ``perf`` profiler. - * Set *\*exitcode* and return ``1`` if *config* has an exit code set. - * Return ``0`` if *config* has no exit code set. + If equals to ``2``, enable support for the Linux ``perf`` profiler with + DWARF JIT support. - Only the ``Py_InitializeFromInitConfig()`` function can set an exit - code if the ``parse_argv`` option is non-zero. + Set to ``1`` by :option:`-X perf <-X>` command-line option and the + :envvar:`PYTHONPERFSUPPORT` environment variable. - An exit code can be set when parsing the command line failed (exit - code ``2``) or when a command line option asks to display the command - line help (exit code ``0``). + Set to ``2`` by the :option:`-X perf_jit <-X>` command-line option and + the :envvar:`PYTHON_PERF_JIT_SUPPORT` environment variable. + Default: ``-1``. -Get Options ------------ + .. seealso:: + See :ref:`perf_profiling` for more information. -The configuration option *name* parameter must be a non-NULL -null-terminated UTF-8 encoded string. + .. versionadded:: 3.12 -.. c:function:: int PyInitConfig_HasOption(PyInitConfig *config, const char *name) + .. c:member:: wchar_t* stdlib_dir - Test if the configuration has an option called *name*. + Directory of the Python standard library. - Return ``1`` if the option exists, or return ``0`` otherwise. + Default: ``NULL``. + .. versionadded:: 3.11 -.. c:function:: int PyInitConfig_GetInt(PyInitConfig *config, const char *name, int64_t *value) + .. c:member:: int use_environment - Get an integer configuration option. + Use :ref:`environment variables `? - * Set *\*value*, and return ``0`` on success. - * Set an error in *config* and return ``-1`` on error. + If equals to zero, ignore the :ref:`environment variables + `. + Set to ``0`` by the :option:`-E` environment variable. -.. c:function:: int PyInitConfig_GetStr(PyInitConfig *config, const char *name, char **value) + Default: ``1`` in Python config and ``0`` in isolated config. - Get a string configuration option as a null-terminated UTF-8 - encoded string. + .. c:member:: int use_system_logger - * Set *\*value*, and return ``0`` on success. - * Set an error in *config* and return ``-1`` on error. + If non-zero, ``stdout`` and ``stderr`` will be redirected to the system + log. - *\*value* can be set to ``NULL`` if the option is an optional string and the - option is unset. + Only available on macOS 10.12 and later, and on iOS. - On success, the string must be released with ``free(value)`` if it's not - ``NULL``. + Default: ``0`` (don't use system log). + .. versionadded:: 3.13.2 -.. c:function:: int PyInitConfig_GetStrList(PyInitConfig *config, const char *name, size_t *length, char ***items) + .. c:member:: int user_site_directory - Get a string list configuration option as an array of - null-terminated UTF-8 encoded strings. + If non-zero, add the user site directory to :data:`sys.path`. 
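A hedged sketch of reading options back from a :c:type:`PyInitConfig` with the getters described above; the option names follow the :c:type:`PyConfig` field names, and the helper function is an assumption:

.. code-block:: c

   #include <Python.h>
   #include <stdio.h>

   static int
   dump_some_options(void)
   {
       PyInitConfig *config = PyInitConfig_Create();
       if (config == NULL) {
           return -1;
       }

       int64_t faulthandler;
       if (PyInitConfig_GetInt(config, "faulthandler", &faulthandler) < 0) {
           goto error;
       }
       printf("faulthandler = %lld\n", (long long)faulthandler);

       char *platlibdir;
       if (PyInitConfig_GetStr(config, "platlibdir", &platlibdir) < 0) {
           goto error;
       }
       printf("platlibdir = %s\n", platlibdir != NULL ? platlibdir : "(unset)");
       free(platlibdir);

       PyInitConfig_Free(config);
       return 0;

   error:
       {
           const char *err_msg;
           (void)PyInitConfig_GetError(config, &err_msg);
           fprintf(stderr, "config error: %s\n", err_msg);
           PyInitConfig_Free(config);
           return -1;
       }
   }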
- * Set *\*length* and *\*value*, and return ``0`` on success. - * Set an error in *config* and return ``-1`` on error. + Set to ``0`` by the :option:`-s` and :option:`-I` command line options. - On success, the string list must be released with - ``PyInitConfig_FreeStrList(length, items)``. + Set to ``0`` by the :envvar:`PYTHONNOUSERSITE` environment variable. + Default: ``1`` in Python mode, ``0`` in isolated mode. -.. c:function:: void PyInitConfig_FreeStrList(size_t length, char **items) + .. c:member:: int verbose - Free memory of a string list created by - ``PyInitConfig_GetStrList()``. + Verbose mode. If greater than ``0``, print a message each time a module is + imported, showing the place (filename or built-in module) from which + it is loaded. + If greater than or equal to ``2``, print a message for each file that is + checked for when searching for a module. Also provides information on + module cleanup at exit. -Set Options ------------ + Incremented by the :option:`-v` command line option. -The configuration option *name* parameter must be a non-NULL null-terminated -UTF-8 encoded string. + Set by the :envvar:`PYTHONVERBOSE` environment variable value. -Some configuration options have side effects on other options. This logic is -only implemented when ``Py_InitializeFromInitConfig()`` is called, not by the -"Set" functions below. For example, setting ``dev_mode`` to ``1`` does not set -``faulthandler`` to ``1``. + Default: ``0``. -.. c:function:: int PyInitConfig_SetInt(PyInitConfig *config, const char *name, int64_t value) + .. c:member:: PyWideStringList warnoptions - Set an integer configuration option. + Options of the :mod:`warnings` module to build warnings filters, lowest + to highest priority: :data:`sys.warnoptions`. - * Return ``0`` on success. - * Set an error in *config* and return ``-1`` on error. + The :mod:`warnings` module adds :data:`sys.warnoptions` in the reverse + order: the last :c:member:`PyConfig.warnoptions` item becomes the first + item of :data:`warnings.filters` which is checked first (highest + priority). + The :option:`-W` command line options adds its value to + :c:member:`~PyConfig.warnoptions`, it can be used multiple times. -.. c:function:: int PyInitConfig_SetStr(PyInitConfig *config, const char *name, const char *value) + The :envvar:`PYTHONWARNINGS` environment variable can also be used to add + warning options. Multiple options can be specified, separated by commas + (``,``). - Set a string configuration option from a null-terminated UTF-8 - encoded string. The string is copied. + Default: empty list. - * Return ``0`` on success. - * Set an error in *config* and return ``-1`` on error. + .. c:member:: int write_bytecode + If equal to ``0``, Python won't try to write ``.pyc`` files on the import of + source modules. -.. c:function:: int PyInitConfig_SetStrList(PyInitConfig *config, const char *name, size_t length, char * const *items) + Set to ``0`` by the :option:`-B` command line option and the + :envvar:`PYTHONDONTWRITEBYTECODE` environment variable. - Set a string list configuration option from an array of - null-terminated UTF-8 encoded strings. The string list is copied. + :data:`sys.dont_write_bytecode` is initialized to the inverted value of + :c:member:`~PyConfig.write_bytecode`. - * Return ``0`` on success. - * Set an error in *config* and return ``-1`` on error. + Default: ``1``. + .. c:member:: PyWideStringList xoptions -Module ------- + Values of the :option:`-X` command line options: :data:`sys._xoptions`. -.. 
c:function:: int PyInitConfig_AddModule(PyInitConfig *config, const char *name, PyObject* (*initfunc)(void)) + Default: empty list. - Add a built-in extension module to the table of built-in modules. + .. c:member:: int _pystats - The new module can be imported by the name *name*, and uses the function - *initfunc* as the initialization function called on the first attempted - import. + If non-zero, write performance statistics at Python exit. - * Return ``0`` on success. - * Set an error in *config* and return ``-1`` on error. + Need a special build with the ``Py_STATS`` macro: + see :option:`--enable-pystats`. - If Python is initialized multiple times, ``PyInitConfig_AddModule()`` must - be called at each Python initialization. + Default: ``0``. - Similar to the :c:func:`PyImport_AppendInittab` function. +If :c:member:`~PyConfig.parse_argv` is non-zero, :c:member:`~PyConfig.argv` +arguments are parsed the same way the regular Python parses :ref:`command line +arguments `, and Python arguments are stripped from +:c:member:`~PyConfig.argv`. +The :c:member:`~PyConfig.xoptions` options are parsed to set other options: see +the :option:`-X` command line option. -Initialize Python ------------------ +.. versionchanged:: 3.9 -.. c:function:: int Py_InitializeFromInitConfig(PyInitConfig *config) + The ``show_alloc_count`` field has been removed. - Initialize Python from the initialization configuration. - * Return ``0`` on success. - * Set an error in *config* and return ``-1`` on error. - * Set an exit code in *config* and return ``-1`` if Python wants to - exit. +.. _init-from-config: - See ``PyInitConfig_GetExitcode()`` for the exit code case. +Initialization with PyConfig +---------------------------- +Initializing the interpreter from a populated configuration struct is handled +by calling :c:func:`Py_InitializeFromConfig`. -Example -------- +The caller is responsible to handle exceptions (error or exit) using +:c:func:`PyStatus_Exception` and :c:func:`Py_ExitStatusException`. -Example initializing Python, set configuration options of various types, -return ``-1`` on error: +If :c:func:`PyImport_FrozenModules`, :c:func:`PyImport_AppendInittab` or +:c:func:`PyImport_ExtendInittab` are used, they must be set or called after +Python preinitialization and before the Python initialization. If Python is +initialized multiple times, :c:func:`PyImport_AppendInittab` or +:c:func:`PyImport_ExtendInittab` must be called before each Python +initialization. -.. code-block:: c +The current configuration (``PyConfig`` type) is stored in +``PyInterpreterState.config``. - int init_python(void) - { - PyInitConfig *config = PyInitConfig_Create(); - if (config == NULL) { - printf("PYTHON INIT ERROR: memory allocation failed\n"); - return -1; - } +Example setting the program name:: - // Set an integer (dev mode) - if (PyInitConfig_SetInt(config, "dev_mode", 1) < 0) { - goto error; - } + void init_python(void) + { + PyStatus status; - // Set a list of UTF-8 strings (argv) - char *argv[] = {"my_program", "-c", "pass"}; - if (PyInitConfig_SetStrList(config, "argv", - Py_ARRAY_LENGTH(argv), argv) < 0) { - goto error; - } + PyConfig config; + PyConfig_InitPythonConfig(&config); - // Set a UTF-8 string (program name) - if (PyInitConfig_SetStr(config, "program_name", L"my_program") < 0) { - goto error; + /* Set the program name. Implicitly preinitialize Python. 
*/ + status = PyConfig_SetString(&config, &config.program_name, + L"/path/to/my_program"); + if (PyStatus_Exception(status)) { + goto exception; } - // Initialize Python with the configuration - if (Py_InitializeFromInitConfig(config) < 0) { - goto error; + status = Py_InitializeFromConfig(&config); + if (PyStatus_Exception(status)) { + goto exception; } - PyInitConfig_Free(config); - return 0; - - error: - { - // Display the error message - // This uncommon braces style is used, because you cannot make - // goto targets point to variable declarations. - const char *err_msg; - (void)PyInitConfig_GetError(config, &err_msg); - printf("PYTHON INIT ERROR: %s\n", err_msg); - PyInitConfig_Free(config); + PyConfig_Clear(&config); + return; - return -1; - } + exception: + PyConfig_Clear(&config); + Py_ExitStatusException(status); } +More complete example modifying the default configuration, read the +configuration, and then override some parameters. Note that since +3.11, many parameters are not calculated until initialization, and +so values cannot be read from the configuration structure. Any values +set before initialize is called will be left unchanged by +initialization:: -Runtime Python configuration API -================================ - -The configuration option *name* parameter must be a non-NULL null-terminated -UTF-8 encoded string. - -Some options are read from the :mod:`sys` attributes. For example, the option -``"argv"`` is read from :data:`sys.argv`. + PyStatus init_python(const char *program_name) + { + PyStatus status; + PyConfig config; + PyConfig_InitPythonConfig(&config); -.. c:function:: PyObject* PyConfig_Get(const char *name) + /* Set the program name before reading the configuration + (decode byte string from the locale encoding). - Get the current runtime value of a configuration option as a Python object. + Implicitly preinitialize Python. */ + status = PyConfig_SetBytesString(&config, &config.program_name, + program_name); + if (PyStatus_Exception(status)) { + goto done; + } - * Return a new reference on success. - * Set an exception and return ``NULL`` on error. + /* Read all configuration at once */ + status = PyConfig_Read(&config); + if (PyStatus_Exception(status)) { + goto done; + } - The object type depends on the configuration option. It can be: + /* Specify sys.path explicitly */ + /* If you want to modify the default set of paths, finish + initialization first and then use PySys_GetObject("path") */ + config.module_search_paths_set = 1; + status = PyWideStringList_Append(&config.module_search_paths, + L"/path/to/stdlib"); + if (PyStatus_Exception(status)) { + goto done; + } + status = PyWideStringList_Append(&config.module_search_paths, + L"/path/to/more/modules"); + if (PyStatus_Exception(status)) { + goto done; + } - * ``bool`` - * ``int`` - * ``str`` - * ``list[str]`` - * ``dict[str, str]`` + /* Override executable computed by PyConfig_Read() */ + status = PyConfig_SetString(&config, &config.executable, + L"/path/to/my_executable"); + if (PyStatus_Exception(status)) { + goto done; + } - The caller must hold the GIL. The function cannot be called before - Python initialization nor after Python finalization. + status = Py_InitializeFromConfig(&config); - .. versionadded:: 3.14 + done: + PyConfig_Clear(&config); + return status; + } -.. c:function:: int PyConfig_GetInt(const char *name, int *value) +.. _init-isolated-conf: - Similar to :c:func:`PyConfig_Get`, but get the value as a C int. 
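For example, a minimal sketch of querying the running interpreter through this API; it assumes Python is already initialized and the caller holds the GIL:

.. code-block:: c

   #include <Python.h>

   static int
   print_runtime_config(void)
   {
       /* New reference to the list backing sys.argv */
       PyObject *argv = PyConfig_Get("argv");
       if (argv == NULL) {
           return -1;
       }
       int res = PyObject_Print(argv, stdout, 0);
       Py_DECREF(argv);
       if (res < 0) {
           return -1;
       }

       int verbose;
       if (PyConfig_GetInt("verbose", &verbose) < 0) {
           return -1;
       }
       printf("\nverbose = %d\n", verbose);
       return 0;
   }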
+Isolated Configuration +---------------------- - * Return ``0`` on success. - * Set an exception and return ``-1`` on error. +:c:func:`PyPreConfig_InitIsolatedConfig` and +:c:func:`PyConfig_InitIsolatedConfig` functions create a configuration to +isolate Python from the system. For example, to embed Python into an +application. - .. versionadded:: 3.14 +This configuration ignores global configuration variables, environment +variables, command line arguments (:c:member:`PyConfig.argv` is not parsed) +and user site directory. The C standard streams (ex: ``stdout``) and the +LC_CTYPE locale are left unchanged. Signal handlers are not installed. +Configuration files are still used with this configuration to determine +paths that are unspecified. Ensure :c:member:`PyConfig.home` is specified +to avoid computing the default path configuration. -.. c:function:: PyObject* PyConfig_Names(void) - Get all configuration option names as a ``frozenset``. +.. _init-python-config: - * Return a new reference on success. - * Set an exception and return ``NULL`` on error. +Python Configuration +-------------------- - The caller must hold the GIL. The function cannot be called before - Python initialization nor after Python finalization. +:c:func:`PyPreConfig_InitPythonConfig` and :c:func:`PyConfig_InitPythonConfig` +functions create a configuration to build a customized Python which behaves as +the regular Python. - .. versionadded:: 3.14 +Environments variables and command line arguments are used to configure +Python, whereas global configuration variables are ignored. +This function enables C locale coercion (:pep:`538`) +and :ref:`Python UTF-8 Mode ` +(:pep:`540`) depending on the LC_CTYPE locale, :envvar:`PYTHONUTF8` and +:envvar:`PYTHONCOERCECLOCALE` environment variables. -.. c:function:: int PyConfig_Set(const char *name, PyObject *value) - Set the current runtime value of a configuration option. +.. _init-path-config: - * Raise a :exc:`ValueError` if there is no option *name*. - * Raise a :exc:`ValueError` if *value* is an invalid value. - * Raise a :exc:`ValueError` if the option is read-only (cannot be set). - * Raise a :exc:`TypeError` if *value* has not the proper type. +Python Path Configuration +------------------------- - The caller must hold the GIL. The function cannot be called before - Python initialization nor after Python finalization. +:c:type:`PyConfig` contains multiple fields for the path configuration: - .. versionadded:: 3.14 +* Path configuration inputs: + * :c:member:`PyConfig.home` + * :c:member:`PyConfig.platlibdir` + * :c:member:`PyConfig.pathconfig_warnings` + * :c:member:`PyConfig.program_name` + * :c:member:`PyConfig.pythonpath_env` + * current working directory: to get absolute paths + * ``PATH`` environment variable to get the program full path + (from :c:member:`PyConfig.program_name`) + * ``__PYVENV_LAUNCHER__`` environment variable + * (Windows only) Application paths in the registry under + "Software\Python\PythonCore\X.Y\PythonPath" of HKEY_CURRENT_USER and + HKEY_LOCAL_MACHINE (where X.Y is the Python version). -Py_GetArgcArgv() -================ +* Path configuration output fields: -.. 
c:function:: void Py_GetArgcArgv(int *argc, wchar_t ***argv) + * :c:member:`PyConfig.base_exec_prefix` + * :c:member:`PyConfig.base_executable` + * :c:member:`PyConfig.base_prefix` + * :c:member:`PyConfig.exec_prefix` + * :c:member:`PyConfig.executable` + * :c:member:`PyConfig.module_search_paths_set`, + :c:member:`PyConfig.module_search_paths` + * :c:member:`PyConfig.prefix` - Get the original command line arguments, before Python modified them. +If at least one "output field" is not set, Python calculates the path +configuration to fill unset fields. If +:c:member:`~PyConfig.module_search_paths_set` is equal to ``0``, +:c:member:`~PyConfig.module_search_paths` is overridden and +:c:member:`~PyConfig.module_search_paths_set` is set to ``1``. - See also :c:member:`PyConfig.orig_argv` member. +It is possible to completely ignore the function calculating the default +path configuration by setting explicitly all path configuration output +fields listed above. A string is considered as set even if it is non-empty. +``module_search_paths`` is considered as set if +``module_search_paths_set`` is set to ``1``. In this case, +``module_search_paths`` will be used without modification. +Set :c:member:`~PyConfig.pathconfig_warnings` to ``0`` to suppress warnings when +calculating the path configuration (Unix only, Windows does not log any warning). -Multi-Phase Initialization Private Provisional API -================================================== +If :c:member:`~PyConfig.base_prefix` or :c:member:`~PyConfig.base_exec_prefix` +fields are not set, they inherit their value from :c:member:`~PyConfig.prefix` +and :c:member:`~PyConfig.exec_prefix` respectively. -This section is a private provisional API introducing multi-phase -initialization, the core feature of :pep:`432`: +:c:func:`Py_RunMain` and :c:func:`Py_Main` modify :data:`sys.path`: -* "Core" initialization phase, "bare minimum Python": +* If :c:member:`~PyConfig.run_filename` is set and is a directory which contains a + ``__main__.py`` script, prepend :c:member:`~PyConfig.run_filename` to + :data:`sys.path`. +* If :c:member:`~PyConfig.isolated` is zero: - * Builtin types; - * Builtin exceptions; - * Builtin and frozen modules; - * The :mod:`sys` module is only partially initialized - (ex: :data:`sys.path` doesn't exist yet). + * If :c:member:`~PyConfig.run_module` is set, prepend the current directory + to :data:`sys.path`. Do nothing if the current directory cannot be read. + * If :c:member:`~PyConfig.run_filename` is set, prepend the directory of the + filename to :data:`sys.path`. + * Otherwise, prepend an empty string to :data:`sys.path`. -* "Main" initialization phase, Python is fully initialized: +If :c:member:`~PyConfig.site_import` is non-zero, :data:`sys.path` can be +modified by the :mod:`site` module. If +:c:member:`~PyConfig.user_site_directory` is non-zero and the user's +site-package directory exists, the :mod:`site` module appends the user's +site-package directory to :data:`sys.path`. - * Install and configure :mod:`importlib`; - * Apply the :ref:`Path Configuration `; - * Install signal handlers; - * Finish :mod:`sys` module initialization (ex: create :data:`sys.stdout` - and :data:`sys.path`); - * Enable optional features like :mod:`faulthandler` and :mod:`tracemalloc`; - * Import the :mod:`site` module; - * etc. 
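A small sketch of reading the unmodified arguments back with :c:func:`Py_GetArgcArgv`, for example to re-exec the process; the loop and output are illustrative only:

.. code-block:: c

   #include <Python.h>
   #include <stdio.h>

   static void
   show_original_argv(void)
   {
       int argc;
       wchar_t **argv;
       Py_GetArgcArgv(&argc, &argv);
       for (int i = 0; i < argc; i++) {
           printf("argv[%d] = %ls\n", i, argv[i]);
       }
   }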
+The following configuration files are used by the path configuration: -Private provisional API: +* ``pyvenv.cfg`` +* ``._pth`` file (ex: ``python._pth``) +* ``pybuilddir.txt`` (Unix only) -* :c:member:`PyConfig._init_main`: if set to ``0``, - :c:func:`Py_InitializeFromConfig` stops at the "Core" initialization phase. +If a ``._pth`` file is present: -.. c:function:: PyStatus _Py_InitializeMain(void) +* Set :c:member:`~PyConfig.isolated` to ``1``. +* Set :c:member:`~PyConfig.use_environment` to ``0``. +* Set :c:member:`~PyConfig.site_import` to ``0``. +* Set :c:member:`~PyConfig.safe_path` to ``1``. - Move to the "Main" initialization phase, finish the Python initialization. +If :c:member:`~PyConfig.home` is not set and a ``pyvenv.cfg`` file is present in +the same directory as :c:member:`~PyConfig.executable`, or its parent, +:c:member:`~PyConfig.prefix` and :c:member:`~PyConfig.exec_prefix` are set that +location. When this happens, :c:member:`~PyConfig.base_prefix` and +:c:member:`~PyConfig.base_exec_prefix` still keep their value, pointing to the +base installation. See :ref:`sys-path-init-virtual-environments` for more +information. -No module is imported during the "Core" phase and the ``importlib`` module is -not configured: the :ref:`Path Configuration ` is only -applied during the "Main" phase. It may allow to customize Python in Python to -override or tune the :ref:`Path Configuration `, maybe -install a custom :data:`sys.meta_path` importer or an import hook, etc. +The ``__PYVENV_LAUNCHER__`` environment variable is used to set +:c:member:`PyConfig.base_executable`. -It may become possible to calculate the :ref:`Path Configuration -` in Python, after the Core phase and before the Main phase, -which is one of the :pep:`432` motivation. +.. versionchanged:: 3.14 -The "Core" phase is not properly defined: what should be and what should -not be available at this phase is not specified yet. The API is marked -as private and provisional: the API can be modified or even be removed -anytime until a proper public API is designed. + :c:member:`~PyConfig.prefix`, and :c:member:`~PyConfig.exec_prefix`, are now + set to the ``pyvenv.cfg`` directory. This was previously done by :mod:`site`, + therefore affected by :option:`-S`. -Example running Python code between "Core" and "Main" initialization -phases:: - void init_python(void) - { - PyStatus status; +Py_GetArgcArgv() +================ - PyConfig config; - PyConfig_InitPythonConfig(&config); - config._init_main = 0; +.. c:function:: void Py_GetArgcArgv(int *argc, wchar_t ***argv) - /* ... customize 'config' configuration ... */ + Get the original command line arguments, before Python modified them. - status = Py_InitializeFromConfig(&config); - PyConfig_Clear(&config); - if (PyStatus_Exception(status)) { - Py_ExitStatusException(status); - } + See also :c:member:`PyConfig.orig_argv` member. - /* Use sys.stderr because sys.stdout is only created - by _Py_InitializeMain() */ - int res = PyRun_SimpleString( - "import sys; " - "print('Run Python code before _Py_InitializeMain', " - "file=sys.stderr)"); - if (res < 0) { - exit(1); - } +Delaying main module execution +============================== - /* ... put more configuration code here ... */ +In some embedding use cases, it may be desirable to separate interpreter initialization +from the execution of the main module. 
- status = _Py_InitializeMain(); - if (PyStatus_Exception(status)) { - Py_ExitStatusException(status); - } - } +This separation can be achieved by setting ``PyConfig.run_command`` to the empty +string during initialization (to prevent the interpreter from dropping into the +interactive prompt), and then subsequently executing the desired main module +code using ``__main__.__dict__`` as the global namespace. diff --git a/Doc/c-api/long.rst b/Doc/c-api/long.rst index 084ba513493ffe..25d9e62e387279 100644 --- a/Doc/c-api/long.rst +++ b/Doc/c-api/long.rst @@ -824,6 +824,6 @@ The :c:type:`PyLongWriter` API can be used to import an integer. Discard a :c:type:`PyLongWriter` created by :c:func:`PyLongWriter_Create`. - *writer* must not be ``NULL``. + If *writer* is ``NULL``, no operation is performed. The writer instance and the *digits* array are invalid after the call. diff --git a/Doc/c-api/module.rst b/Doc/c-api/module.rst index f82a050ab75de0..f71089370152ce 100644 --- a/Doc/c-api/module.rst +++ b/Doc/c-api/module.rst @@ -523,9 +523,6 @@ state: On success, return ``0``. On error, raise an exception and return ``-1``. - Return ``-1`` if *value* is ``NULL``. It must be called with an exception - raised in this case. - Example usage:: static int @@ -540,6 +537,10 @@ state: return res; } + To be convenient, the function accepts ``NULL`` *value* with an exception + set. In this case, return ``-1`` and just leave the raised exception + unchanged. + The example can also be written without checking explicitly if *obj* is ``NULL``:: diff --git a/Doc/c-api/object.rst b/Doc/c-api/object.rst index 3a434a4173eafa..1ba5942c63601d 100644 --- a/Doc/c-api/object.rst +++ b/Doc/c-api/object.rst @@ -613,3 +613,95 @@ Object Protocol .. versionadded:: 3.14 +.. c:function:: int PyUnstable_IsImmortal(PyObject *obj) + + This function returns non-zero if *obj* is :term:`immortal`, and zero + otherwise. This function cannot fail. + + .. note:: + + Objects that are immortal in one CPython version are not guaranteed to + be immortal in another. + + .. versionadded:: next + +.. c:function:: int PyUnstable_TryIncRef(PyObject *obj) + + Increments the reference count of *obj* if it is not zero. Returns ``1`` + if the object's reference count was successfully incremented. Otherwise, + this function returns ``0``. + + :c:func:`PyUnstable_EnableTryIncRef` must have been called + earlier on *obj* or this function may spuriously return ``0`` in the + :term:`free threading` build. + + This function is logically equivalent to the following C code, except that + it behaves atomically in the :term:`free threading` build:: + + if (Py_REFCNT(op) > 0) { + Py_INCREF(op); + return 1; + } + return 0; + + This is intended as a building block for managing weak references + without the overhead of a Python :ref:`weak reference object `. + + Typically, correct use of this function requires support from *obj*'s + deallocator (:c:member:`~PyTypeObject.tp_dealloc`). + For example, the following sketch could be adapted to implement a + "weakmap" that works like a :py:class:`~weakref.WeakValueDictionary` + for a specific type: + + .. 
code-block:: c + + PyMutex mutex; + + PyObject * + add_entry(weakmap_key_type *key, PyObject *value) + { + PyUnstable_EnableTryIncRef(value); + weakmap_type weakmap = ...; + PyMutex_Lock(&mutex); + weakmap_add_entry(weakmap, key, value); + PyMutex_Unlock(&mutex); + Py_RETURN_NONE; + } + + PyObject * + get_value(weakmap_key_type *key) + { + weakmap_type weakmap = ...; + PyMutex_Lock(&mutex); + PyObject *result = weakmap_find(weakmap, key); + if (PyUnstable_TryIncRef(result)) { + // `result` is safe to use + PyMutex_Unlock(&mutex); + return result; + } + // if we get here, `result` is starting to be garbage-collected, + // but has not been removed from the weakmap yet + PyMutex_Unlock(&mutex); + return NULL; + } + + // tp_dealloc function for weakmap values + void + value_dealloc(PyObject *value) + { + weakmap_type weakmap = ...; + PyMutex_Lock(&mutex); + weakmap_remove_value(weakmap, value); + + ... + PyMutex_Unlock(&mutex); + } + + .. versionadded:: 3.14 + +.. c:function:: void PyUnstable_EnableTryIncRef(PyObject *obj) + + Enables subsequent uses of :c:func:`PyUnstable_TryIncRef` on *obj*. The + caller must hold a :term:`strong reference` to *obj* when calling this. + + .. versionadded:: 3.14 diff --git a/Doc/c-api/sys.rst b/Doc/c-api/sys.rst index c688afdca8231d..4ab5df4ccccdbb 100644 --- a/Doc/c-api/sys.rst +++ b/Doc/c-api/sys.rst @@ -216,6 +216,38 @@ Operating System Utilities The function now uses the UTF-8 encoding on Windows if :c:member:`PyPreConfig.legacy_windows_fs_encoding` is zero. +.. c:function:: FILE* Py_fopen(PyObject *path, const char *mode) + + Similar to :c:func:`!fopen`, but *path* is a Python object and + an exception is set on error. + + *path* must be a :class:`str` object, a :class:`bytes` object, + or a :term:`path-like object`. + + On success, return the new file pointer. + On error, set an exception and return ``NULL``. + + The file must be closed by :c:func:`Py_fclose` rather than calling directly + :c:func:`!fclose`. + + The file descriptor is created non-inheritable (:pep:`446`). + + The caller must hold the GIL. + + .. versionadded:: 3.14 + + +.. c:function:: int Py_fclose(FILE *file) + + Close a file that was opened by :c:func:`Py_fopen`. + + On success, return ``0``. + On error, return ``EOF`` and ``errno`` is set to indicate the error. + In either case, any further access (including another call to + :c:func:`Py_fclose`) to the stream results in undefined behavior. + + .. versionadded:: 3.14 + .. _systemfunctions: diff --git a/Doc/c-api/unicode.rst b/Doc/c-api/unicode.rst index dcbc8804cd6b89..94110d48ed7d85 100644 --- a/Doc/c-api/unicode.rst +++ b/Doc/c-api/unicode.rst @@ -786,16 +786,25 @@ Functions encoding to and decoding from the :term:`filesystem encoding and error handler` (:pep:`383` and :pep:`529`). To encode file names to :class:`bytes` during argument parsing, the ``"O&"`` -converter should be used, passing :c:func:`PyUnicode_FSConverter` as the +converter should be used, passing :c:func:`!PyUnicode_FSConverter` as the conversion function: .. c:function:: int PyUnicode_FSConverter(PyObject* obj, void* result) - ParseTuple converter: encode :class:`str` objects -- obtained directly or + :ref:`PyArg_Parse\* converter `: encode :class:`str` objects -- obtained directly or through the :class:`os.PathLike` interface -- to :class:`bytes` using :c:func:`PyUnicode_EncodeFSDefault`; :class:`bytes` objects are output as-is. - *result* must be a :c:expr:`PyBytesObject*` which must be released when it is - no longer used. 
+ *result* must be an address of a C variable of type :c:expr:`PyObject*` + (or :c:expr:`PyBytesObject*`). + On success, set the variable to a new :term:`strong reference` to + a :ref:`bytes object ` which must be released + when it is no longer used and return a non-zero value + (:c:macro:`Py_CLEANUP_SUPPORTED`). + Embedded null bytes are not allowed in the result. + On failure, return ``0`` with an exception set. + + If *obj* is ``NULL``, the function releases a strong reference + stored in the variable referred by *result* and returns ``1``. .. versionadded:: 3.1 @@ -803,16 +812,26 @@ conversion function: Accepts a :term:`path-like object`. To decode file names to :class:`str` during argument parsing, the ``"O&"`` -converter should be used, passing :c:func:`PyUnicode_FSDecoder` as the +converter should be used, passing :c:func:`!PyUnicode_FSDecoder` as the conversion function: .. c:function:: int PyUnicode_FSDecoder(PyObject* obj, void* result) - ParseTuple converter: decode :class:`bytes` objects -- obtained either + :ref:`PyArg_Parse\* converter `: decode :class:`bytes` objects -- obtained either directly or indirectly through the :class:`os.PathLike` interface -- to :class:`str` using :c:func:`PyUnicode_DecodeFSDefaultAndSize`; :class:`str` - objects are output as-is. *result* must be a :c:expr:`PyUnicodeObject*` which - must be released when it is no longer used. + objects are output as-is. + *result* must be an address of a C variable of type :c:expr:`PyObject*` + (or :c:expr:`PyUnicodeObject*`). + On success, set the variable to a new :term:`strong reference` to + a :ref:`Unicode object ` which must be released + when it is no longer used and return a non-zero value + (:c:macro:`Py_CLEANUP_SUPPORTED`). + Embedded null characters are not allowed in the result. + On failure, return ``0`` with an exception set. + + If *obj* is ``NULL``, release the strong reference + to the object referred to by *result* and return ``1``. .. versionadded:: 3.2 @@ -1035,6 +1054,15 @@ These are the UTF-8 codec APIs: As :c:func:`PyUnicode_AsUTF8AndSize`, but does not store the size. + .. warning:: + + This function does not have any special behavior for + `null characters `_ embedded within + *unicode*. As a result, strings containing null characters will remain in the returned + string, which some C functions might interpret as the end of the string, leading to + truncation. If truncation is an issue, it is recommended to use :c:func:`PyUnicode_AsUTF8AndSize` + instead. + .. versionadded:: 3.3 .. versionchanged:: 3.7 diff --git a/Doc/c-api/veryhigh.rst b/Doc/c-api/veryhigh.rst index 9f02bdb5896563..1ef4181d52eb10 100644 --- a/Doc/c-api/veryhigh.rst +++ b/Doc/c-api/veryhigh.rst @@ -348,8 +348,20 @@ the same library that the Python runtime is using. .. versionchanged:: 3.8 Added *cf_feature_version* field. + The available compiler flags are accessible as macros: -.. c:var:: int CO_FUTURE_DIVISION + .. c:namespace:: NULL - This bit can be set in *flags* to cause division operator ``/`` to be - interpreted as "true division" according to :pep:`238`. + .. c:macro:: PyCF_ALLOW_TOP_LEVEL_AWAIT + PyCF_ONLY_AST + PyCF_OPTIMIZED_AST + PyCF_TYPE_COMMENTS + + See :ref:`compiler flags ` in documentation of the + :py:mod:`!ast` Python module, which exports these constants under + the same names. + + .. c:var:: int CO_FUTURE_DIVISION + + This bit can be set in *flags* to cause division operator ``/`` to be + interpreted as "true division" according to :pep:`238`. 
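A hedged sketch of the ``"O&"`` converter usage documented in the unicode.rst hunk above; the module-level function being defined is hypothetical:

.. code-block:: c

   #include <Python.h>

   static PyObject *
   example_open(PyObject *self, PyObject *args)
   {
       PyObject *path_bytes = NULL;
       /* PyUnicode_FSConverter stores a new bytes object in path_bytes */
       if (!PyArg_ParseTuple(args, "O&", PyUnicode_FSConverter, &path_bytes)) {
           return NULL;
       }
       const char *path = PyBytes_AsString(path_bytes);
       /* ... use `path` with C APIs that expect the filesystem encoding ... */
       Py_DECREF(path_bytes);
       Py_RETURN_NONE;
   }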
diff --git a/Doc/conf.py b/Doc/conf.py index ae08c7fa288080..94af54084ee338 100644 --- a/Doc/conf.py +++ b/Doc/conf.py @@ -9,9 +9,6 @@ import importlib import os import sys -import time - -import sphinx # Make our custom extensions available to Sphinx sys.path.append(os.path.abspath('tools/extensions')) @@ -28,8 +25,11 @@ 'audit_events', 'availability', 'c_annotations', + 'changes', 'glossary_search', 'lexers', + 'misc_news', + 'pydoc_topics', 'pyspecific', 'sphinx.ext.coverage', 'sphinx.ext.doctest', @@ -97,7 +97,8 @@ highlight_language = 'python3' # Minimum version of sphinx required -needs_sphinx = '7.2.6' +# Keep this version in sync with ``Doc/requirements.txt``. +needs_sphinx = '8.1.3' # Create table of contents entries for domain objects (e.g. functions, classes, # attributes, etc.). Default is True. @@ -376,13 +377,7 @@ # This 'Last updated on:' timestamp is inserted at the bottom of every page. html_last_updated_fmt = '%b %d, %Y (%H:%M UTC)' -if sphinx.version_info[:2] >= (8, 1): - html_last_updated_use_utc = True -else: - html_time = int(os.environ.get('SOURCE_DATE_EPOCH', time.time())) - html_last_updated_fmt = time.strftime( - html_last_updated_fmt, time.gmtime(html_time) - ) +html_last_updated_use_utc = True # Path to find HTML templates to override theme templates_path = ['tools/templates'] @@ -566,8 +561,6 @@ r'https://github.com/python/cpython/tree/.*': 'https://github.com/python/cpython/blob/.*', # Intentional HTTP use at Misc/NEWS.d/3.5.0a1.rst r'http://www.python.org/$': 'https://www.python.org/$', - # Used in license page, keep as is - r'https://www.zope.org/': r'https://www.zope.dev/', # Microsoft's redirects to learn.microsoft.com r'https://msdn.microsoft.com/.*': 'https://learn.microsoft.com/.*', r'https://docs.microsoft.com/.*': 'https://learn.microsoft.com/.*', @@ -619,16 +612,6 @@ } extlinks_detect_hardcoded_links = True -if sphinx.version_info[:2] < (8, 1): - # Sphinx 8.1 has in-built CVE and CWE roles. 
- extlinks |= { - "cve": ( - "https://www.cve.org/CVERecord?id=CVE-%s", - "CVE-%s", - ), - "cwe": ("https://cwe.mitre.org/data/definitions/%s.html", "CWE-%s"), - } - # Options for c_annotations extension # ----------------------------------- diff --git a/Doc/constraints.txt b/Doc/constraints.txt index 26ac1862dbac0b..29cd4be1d3c8db 100644 --- a/Doc/constraints.txt +++ b/Doc/constraints.txt @@ -13,14 +13,12 @@ packaging<25 Pygments<3 requests<3 snowballstemmer<3 -# keep lower-bounds until Sphinx 8.1 is released -# https://github.com/sphinx-doc/sphinx/pull/12756 -sphinxcontrib-applehelp>=1.0.7,<3 -sphinxcontrib-devhelp>=1.0.6,<3 -sphinxcontrib-htmlhelp>=2.0.6,<3 -sphinxcontrib-jsmath>=1.0.1,<2 -sphinxcontrib-qthelp>=1.0.6,<3 -sphinxcontrib-serializinghtml>=1.1.9,<3 +sphinxcontrib-applehelp<3 +sphinxcontrib-devhelp<3 +sphinxcontrib-htmlhelp<3 +sphinxcontrib-jsmath<2 +sphinxcontrib-qthelp<3 +sphinxcontrib-serializinghtml<3 # Direct dependencies of Jinja2 (Jinja is a dependency of Sphinx, see above) MarkupSafe<3 diff --git a/Doc/data/refcounts.dat b/Doc/data/refcounts.dat index e78754e24e23d8..d709d2d91b0eb0 100644 --- a/Doc/data/refcounts.dat +++ b/Doc/data/refcounts.dat @@ -3052,3 +3052,11 @@ _Py_c_quot:Py_complex:divisor:: _Py_c_sum:Py_complex::: _Py_c_sum:Py_complex:left:: _Py_c_sum:Py_complex:right:: + +PyImport_ImportModuleAttr:PyObject*::+1: +PyImport_ImportModuleAttr:PyObject*:mod_name:0: +PyImport_ImportModuleAttr:PyObject*:attr_name:0: + +PyImport_ImportModuleAttrString:PyObject*::+1: +PyImport_ImportModuleAttrString:const char *:mod_name:: +PyImport_ImportModuleAttrString:const char *:attr_name:: diff --git a/Doc/data/stable_abi.dat b/Doc/data/stable_abi.dat index 6f9d27297e8f65..59e7a31bc2ef06 100644 --- a/Doc/data/stable_abi.dat +++ b/Doc/data/stable_abi.dat @@ -582,7 +582,6 @@ func,PySequence_Contains,3.2,, func,PySequence_Count,3.2,, func,PySequence_DelItem,3.2,, func,PySequence_DelSlice,3.2,, -func,PySequence_Fast,3.2,, func,PySequence_GetItem,3.2,, func,PySequence_GetSlice,3.2,, func,PySequence_In,3.2,, @@ -883,6 +882,8 @@ func,Py_Main,3.2,, func,Py_MakePendingCalls,3.2,, func,Py_NewInterpreter,3.2,, func,Py_NewRef,3.10,, +func,Py_PACK_FULL_VERSION,3.14,, +func,Py_PACK_VERSION,3.14,, func,Py_REFCNT,3.14,, func,Py_ReprEnter,3.2,, func,Py_ReprLeave,3.2,, diff --git a/Doc/deprecations/c-api-pending-removal-in-3.14.rst b/Doc/deprecations/c-api-pending-removal-in-3.14.rst index 9e10bf2691e5c8..c805074669811a 100644 --- a/Doc/deprecations/c-api-pending-removal-in-3.14.rst +++ b/Doc/deprecations/c-api-pending-removal-in-3.14.rst @@ -6,67 +6,3 @@ Pending removal in Python 3.14 * Creating :c:data:`immutable types ` with mutable bases (:gh:`95388`). - -* Functions to configure Python's initialization, deprecated in Python 3.11: - - * :c:func:`!PySys_SetArgvEx()`: - Set :c:member:`PyConfig.argv` instead. - * :c:func:`!PySys_SetArgv()`: - Set :c:member:`PyConfig.argv` instead. - * :c:func:`!Py_SetProgramName()`: - Set :c:member:`PyConfig.program_name` instead. - * :c:func:`!Py_SetPythonHome()`: - Set :c:member:`PyConfig.home` instead. - - The :c:func:`Py_InitializeFromConfig` API should be used with - :c:type:`PyConfig` instead. - -* Global configuration variables: - - * :c:var:`Py_DebugFlag`: - Use :c:member:`PyConfig.parser_debug` instead. - * :c:var:`Py_VerboseFlag`: - Use :c:member:`PyConfig.verbose` instead. - * :c:var:`Py_QuietFlag`: - Use :c:member:`PyConfig.quiet` instead. - * :c:var:`Py_InteractiveFlag`: - Use :c:member:`PyConfig.interactive` instead. 
- * :c:var:`Py_InspectFlag`: - Use :c:member:`PyConfig.inspect` instead. - * :c:var:`Py_OptimizeFlag`: - Use :c:member:`PyConfig.optimization_level` instead. - * :c:var:`Py_NoSiteFlag`: - Use :c:member:`PyConfig.site_import` instead. - * :c:var:`Py_BytesWarningFlag`: - Use :c:member:`PyConfig.bytes_warning` instead. - * :c:var:`Py_FrozenFlag`: - Use :c:member:`PyConfig.pathconfig_warnings` instead. - * :c:var:`Py_IgnoreEnvironmentFlag`: - Use :c:member:`PyConfig.use_environment` instead. - * :c:var:`Py_DontWriteBytecodeFlag`: - Use :c:member:`PyConfig.write_bytecode` instead. - * :c:var:`Py_NoUserSiteDirectory`: - Use :c:member:`PyConfig.user_site_directory` instead. - * :c:var:`Py_UnbufferedStdioFlag`: - Use :c:member:`PyConfig.buffered_stdio` instead. - * :c:var:`Py_HashRandomizationFlag`: - Use :c:member:`PyConfig.use_hash_seed` - and :c:member:`PyConfig.hash_seed` instead. - * :c:var:`Py_IsolatedFlag`: - Use :c:member:`PyConfig.isolated` instead. - * :c:var:`Py_LegacyWindowsFSEncodingFlag`: - Use :c:member:`PyPreConfig.legacy_windows_fs_encoding` instead. - * :c:var:`Py_LegacyWindowsStdioFlag`: - Use :c:member:`PyConfig.legacy_windows_stdio` instead. - * :c:var:`!Py_FileSystemDefaultEncoding`: - Use :c:member:`PyConfig.filesystem_encoding` instead. - * :c:var:`!Py_HasFileSystemDefaultEncoding`: - Use :c:member:`PyConfig.filesystem_encoding` instead. - * :c:var:`!Py_FileSystemDefaultEncodeErrors`: - Use :c:member:`PyConfig.filesystem_errors` instead. - * :c:var:`!Py_UTF8Mode`: - Use :c:member:`PyPreConfig.utf8_mode` instead. - (see :c:func:`Py_PreInitialize`) - - The :c:func:`Py_InitializeFromConfig` API should be used with - :c:type:`PyConfig` instead. diff --git a/Doc/deprecations/c-api-pending-removal-in-3.15.rst b/Doc/deprecations/c-api-pending-removal-in-3.15.rst index 0ce0f9c118c094..666a1622dd0b29 100644 --- a/Doc/deprecations/c-api-pending-removal-in-3.15.rst +++ b/Doc/deprecations/c-api-pending-removal-in-3.15.rst @@ -5,23 +5,122 @@ Pending removal in Python 3.15 * The :c:func:`PyImport_ImportModuleNoBlock`: Use :c:func:`PyImport_ImportModule` instead. * :c:func:`PyWeakref_GetObject` and :c:func:`PyWeakref_GET_OBJECT`: - Use :c:func:`PyWeakref_GetRef` instead. + Use :c:func:`PyWeakref_GetRef` instead. The `pythoncapi-compat project + `__ can be used to get + :c:func:`PyWeakref_GetRef` on Python 3.12 and older. * :c:type:`Py_UNICODE` type and the :c:macro:`!Py_UNICODE_WIDE` macro: Use :c:type:`wchar_t` instead. -* Python initialization functions: +* Python initialization functions, deprecated in Python 3.13: - * :c:func:`PySys_ResetWarnOptions`: - Clear :data:`sys.warnoptions` and :data:`!warnings.filters` instead. - * :c:func:`Py_GetExecPrefix`: - Get :data:`sys.base_exec_prefix` and :data:`sys.exec_prefix` instead. * :c:func:`Py_GetPath`: - Get :data:`sys.path` instead. + Use :c:func:`PyConfig_Get("module_search_paths") ` + (:data:`sys.path`) instead. * :c:func:`Py_GetPrefix`: - Get :data:`sys.base_prefix` and :data:`sys.prefix` instead. + Use :c:func:`PyConfig_Get("base_prefix") ` + (:data:`sys.base_prefix`) instead. Use :c:func:`PyConfig_Get("prefix") + ` (:data:`sys.prefix`) if :ref:`virtual environments + ` need to be handled. + * :c:func:`Py_GetExecPrefix`: + Use :c:func:`PyConfig_Get("base_exec_prefix") ` + (:data:`sys.base_exec_prefix`) instead. Use + :c:func:`PyConfig_Get("exec_prefix") ` + (:data:`sys.exec_prefix`) if :ref:`virtual environments ` need to + be handled. * :c:func:`Py_GetProgramFullPath`: - Get :data:`sys.executable` instead. 
+ Use :c:func:`PyConfig_Get("executable") ` + (:data:`sys.executable`) instead. * :c:func:`Py_GetProgramName`: - Get :data:`sys.executable` instead. + Use :c:func:`PyConfig_Get("executable") ` + (:data:`sys.executable`) instead. * :c:func:`Py_GetPythonHome`: - Get :c:member:`PyConfig.home` - or the :envvar:`PYTHONHOME` environment variable instead. + Use :c:func:`PyConfig_Get("home") ` or the + :envvar:`PYTHONHOME` environment variable instead. + + The `pythoncapi-compat project + `__ can be used to get + :c:func:`PyConfig_Get` on Python 3.13 and older. + +* Functions to configure Python's initialization, deprecated in Python 3.11: + + * :c:func:`!PySys_SetArgvEx()`: + Set :c:member:`PyConfig.argv` instead. + * :c:func:`!PySys_SetArgv()`: + Set :c:member:`PyConfig.argv` instead. + * :c:func:`!Py_SetProgramName()`: + Set :c:member:`PyConfig.program_name` instead. + * :c:func:`!Py_SetPythonHome()`: + Set :c:member:`PyConfig.home` instead. + * :c:func:`PySys_ResetWarnOptions`: + Clear :data:`sys.warnoptions` and :data:`!warnings.filters` instead. + + The :c:func:`Py_InitializeFromConfig` API should be used with + :c:type:`PyConfig` instead. + +* Global configuration variables: + + * :c:var:`Py_DebugFlag`: + Use :c:member:`PyConfig.parser_debug` or + :c:func:`PyConfig_Get("parser_debug") ` instead. + * :c:var:`Py_VerboseFlag`: + Use :c:member:`PyConfig.verbose` or + :c:func:`PyConfig_Get("verbose") ` instead. + * :c:var:`Py_QuietFlag`: + Use :c:member:`PyConfig.quiet` or + :c:func:`PyConfig_Get("quiet") ` instead. + * :c:var:`Py_InteractiveFlag`: + Use :c:member:`PyConfig.interactive` or + :c:func:`PyConfig_Get("interactive") ` instead. + * :c:var:`Py_InspectFlag`: + Use :c:member:`PyConfig.inspect` or + :c:func:`PyConfig_Get("inspect") ` instead. + * :c:var:`Py_OptimizeFlag`: + Use :c:member:`PyConfig.optimization_level` or + :c:func:`PyConfig_Get("optimization_level") ` instead. + * :c:var:`Py_NoSiteFlag`: + Use :c:member:`PyConfig.site_import` or + :c:func:`PyConfig_Get("site_import") ` instead. + * :c:var:`Py_BytesWarningFlag`: + Use :c:member:`PyConfig.bytes_warning` or + :c:func:`PyConfig_Get("bytes_warning") ` instead. + * :c:var:`Py_FrozenFlag`: + Use :c:member:`PyConfig.pathconfig_warnings` or + :c:func:`PyConfig_Get("pathconfig_warnings") ` instead. + * :c:var:`Py_IgnoreEnvironmentFlag`: + Use :c:member:`PyConfig.use_environment` or + :c:func:`PyConfig_Get("use_environment") ` instead. + * :c:var:`Py_DontWriteBytecodeFlag`: + Use :c:member:`PyConfig.write_bytecode` or + :c:func:`PyConfig_Get("write_bytecode") ` instead. + * :c:var:`Py_NoUserSiteDirectory`: + Use :c:member:`PyConfig.user_site_directory` or + :c:func:`PyConfig_Get("user_site_directory") ` instead. + * :c:var:`Py_UnbufferedStdioFlag`: + Use :c:member:`PyConfig.buffered_stdio` or + :c:func:`PyConfig_Get("buffered_stdio") ` instead. + * :c:var:`Py_HashRandomizationFlag`: + Use :c:member:`PyConfig.use_hash_seed` + and :c:member:`PyConfig.hash_seed` or + :c:func:`PyConfig_Get("hash_seed") ` instead. + * :c:var:`Py_IsolatedFlag`: + Use :c:member:`PyConfig.isolated` or + :c:func:`PyConfig_Get("isolated") ` instead. + * :c:var:`Py_LegacyWindowsFSEncodingFlag`: + Use :c:member:`PyPreConfig.legacy_windows_fs_encoding` or + :c:func:`PyConfig_Get("legacy_windows_fs_encoding") ` instead. + * :c:var:`Py_LegacyWindowsStdioFlag`: + Use :c:member:`PyConfig.legacy_windows_stdio` or + :c:func:`PyConfig_Get("legacy_windows_stdio") ` instead. 
+ * :c:var:`!Py_FileSystemDefaultEncoding`, :c:var:`!Py_HasFileSystemDefaultEncoding`: + Use :c:member:`PyConfig.filesystem_encoding` or + :c:func:`PyConfig_Get("filesystem_encoding") ` instead. + * :c:var:`!Py_FileSystemDefaultEncodeErrors`: + Use :c:member:`PyConfig.filesystem_errors` or + :c:func:`PyConfig_Get("filesystem_errors") ` instead. + * :c:var:`!Py_UTF8Mode`: + Use :c:member:`PyPreConfig.utf8_mode` or + :c:func:`PyConfig_Get("utf8_mode") ` instead. + (see :c:func:`Py_PreInitialize`) + + The :c:func:`Py_InitializeFromConfig` API should be used with + :c:type:`PyConfig` to set these options. Or :c:func:`PyConfig_Get` can be + used to get these options at runtime. diff --git a/Doc/deprecations/c-api-pending-removal-in-3.18.rst b/Doc/deprecations/c-api-pending-removal-in-3.18.rst new file mode 100644 index 00000000000000..0689d8b4f9e959 --- /dev/null +++ b/Doc/deprecations/c-api-pending-removal-in-3.18.rst @@ -0,0 +1,19 @@ +Pending removal in Python 3.18 +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +* Deprecated private functions (:gh:`128863`): + + * :c:func:`!_PyBytes_Join`: use :c:func:`PyBytes_Join`. + * :c:func:`!_PyDict_GetItemStringWithError`: use :c:func:`PyDict_GetItemStringRef`. + * :c:func:`!_PyDict_Pop()`: :c:func:`PyDict_Pop`. + * :c:func:`!_PyLong_Sign()`: use :c:func:`PyLong_GetSign`. + * :c:func:`!_PyLong_FromDigits` and :c:func:`!_PyLong_New`: + use :c:func:`PyLongWriter_Create`. + * :c:func:`!_PyThreadState_UncheckedGet`: use :c:func:`PyThreadState_GetUnchecked`. + * :c:func:`!_PyUnicode_AsString`: use :c:func:`PyUnicode_AsUTF8`. + * :c:func:`!_Py_HashPointer`: use :c:func:`Py_HashPointer`. + * :c:func:`!_Py_fopen_obj`: use :c:func:`Py_fopen`. + + The `pythoncapi-compat project + `__ can be used to get these + new public functions on Python 3.13 and older. diff --git a/Doc/deprecations/c-api-pending-removal-in-future.rst b/Doc/deprecations/c-api-pending-removal-in-future.rst index 8fc1c80c35d092..1003047344a3cc 100644 --- a/Doc/deprecations/c-api-pending-removal-in-future.rst +++ b/Doc/deprecations/c-api-pending-removal-in-future.rst @@ -34,7 +34,6 @@ although there is currently no date scheduled for their removal. Use :c:func:`!_PyErr_ChainExceptions1` instead. * :c:member:`!PyBytesObject.ob_shash` member: call :c:func:`PyObject_Hash` instead. -* :c:member:`!PyDictObject.ma_version_tag` member. * Thread Local Storage (TLS) API: * :c:func:`PyThread_create_key`: diff --git a/Doc/deprecations/pending-removal-in-3.15.rst b/Doc/deprecations/pending-removal-in-3.15.rst index 3b03e1f49e6754..390bbff2835cf8 100644 --- a/Doc/deprecations/pending-removal-in-3.15.rst +++ b/Doc/deprecations/pending-removal-in-3.15.rst @@ -51,6 +51,11 @@ Pending removal in Python 3.15 This function is only useful for Jython support, has a confusing API, and is largely untested. +* :mod:`sysconfig`: + + * The ``check_home`` argument of :func:`sysconfig.is_python_build` has been + deprecated since Python 3.12. + * :mod:`threading`: * :func:`~threading.RLock` will take no arguments in Python 3.15. diff --git a/Doc/deprecations/pending-removal-in-3.16.rst b/Doc/deprecations/pending-removal-in-3.16.rst index d093deb648baf7..b408a6d72febe0 100644 --- a/Doc/deprecations/pending-removal-in-3.16.rst +++ b/Doc/deprecations/pending-removal-in-3.16.rst @@ -57,6 +57,11 @@ Pending removal in Python 3.16 In the rare case that you need the bitwise inversion of the underlying integer, convert to ``int`` explicitly (``~int(x)``). 
+* :mod:`functools`: + + * Calling the Python implementation of :func:`functools.reduce` with *function* + or *sequence* as keyword arguments has been deprecated since Python 3.14. + * :mod:`shutil`: * The :class:`!ExecError` exception @@ -75,12 +80,13 @@ Pending removal in Python 3.16 has been deprecated since Python 3.13. Use the :envvar:`PYTHONLEGACYWINDOWSFSENCODING` environment variable instead. +* :mod:`sysconfig`: + + * The :func:`!sysconfig.expand_makefile_vars` function + has been deprecated since Python 3.14. + Use the ``vars`` argument of :func:`sysconfig.get_paths` instead. + * :mod:`tarfile`: * The undocumented and unused :attr:`!TarFile.tarfile` attribute has been deprecated since Python 3.13. - -* :mod:`functools`: - - * Calling the Python implementation of :func:`functools.reduce` with *function* - or *sequence* as keyword arguments has been deprecated since Python 3.14. diff --git a/Doc/faq/programming.rst b/Doc/faq/programming.rst index fa7b22bde1dc6f..776bab1ed5b779 100644 --- a/Doc/faq/programming.rst +++ b/Doc/faq/programming.rst @@ -1906,28 +1906,30 @@ In the standard library code, you will see several common patterns for correctly using identity tests: 1) As recommended by :pep:`8`, an identity test is the preferred way to check -for ``None``. This reads like plain English in code and avoids confusion with -other objects that may have boolean values that evaluate to false. + for ``None``. This reads like plain English in code and avoids confusion + with other objects that may have boolean values that evaluate to false. 2) Detecting optional arguments can be tricky when ``None`` is a valid input -value. In those situations, you can create a singleton sentinel object -guaranteed to be distinct from other objects. For example, here is how -to implement a method that behaves like :meth:`dict.pop`:: + value. In those situations, you can create a singleton sentinel object + guaranteed to be distinct from other objects. For example, here is how + to implement a method that behaves like :meth:`dict.pop`: - _sentinel = object() + .. code-block:: python - def pop(self, key, default=_sentinel): - if key in self: - value = self[key] - del self[key] - return value - if default is _sentinel: - raise KeyError(key) - return default + _sentinel = object() + + def pop(self, key, default=_sentinel): + if key in self: + value = self[key] + del self[key] + return value + if default is _sentinel: + raise KeyError(key) + return default 3) Container implementations sometimes need to augment equality tests with -identity tests. This prevents the code from being confused by objects such as -``float('NaN')`` that are not equal to themselves. + identity tests. This prevents the code from being confused by objects + such as ``float('NaN')`` that are not equal to themselves. For example, here is the implementation of :meth:`!collections.abc.Sequence.__contains__`:: diff --git a/Doc/glossary.rst b/Doc/glossary.rst index f67f3ecad0bc40..d933ca6b467cf3 100644 --- a/Doc/glossary.rst +++ b/Doc/glossary.rst @@ -115,7 +115,7 @@ Glossary :keyword:`yield` expression. Each :keyword:`yield` temporarily suspends processing, remembering the - location execution state (including local variables and pending + execution state (including local variables and pending try-statements). When the *asynchronous generator iterator* effectively resumes with another awaitable returned by :meth:`~object.__anext__`, it picks up where it left off. See :pep:`492` and :pep:`525`. 
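To make the suspend-and-resume behaviour described in this entry concrete, a minimal sketch (the names are illustrative only):

.. code-block:: python

   import asyncio

   async def countdown(n):
       while n > 0:
           yield n       # execution state is saved here until __anext__() resumes it
           n -= 1

   async def main():
       async for value in countdown(3):
           print(value)  # prints 3, 2, 1

   asyncio.run(main())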
@@ -564,7 +564,7 @@ Glossary An object created by a :term:`generator` function. Each :keyword:`yield` temporarily suspends processing, remembering the - location execution state (including local variables and pending + execution state (including local variables and pending try-statements). When the *generator iterator* resumes, it picks up where it left off (in contrast to functions which start fresh on every invocation). @@ -658,6 +658,9 @@ Glossary and therefore it is never deallocated while the interpreter is running. For example, :const:`True` and :const:`None` are immortal in CPython. + Immortal objects can be identified via :func:`sys._is_immortal`, or + via :c:func:`PyUnstable_IsImmortal` in the C API. + immutable An object with a fixed value. Immutable objects include numbers, strings and tuples. Such an object cannot be altered. A new object has to @@ -811,9 +814,11 @@ Glossary processed. loader - An object that loads a module. It must define a method named - :meth:`load_module`. A loader is typically returned by a - :term:`finder`. See also: + An object that loads a module. + It must define the :meth:`!exec_module` and :meth:`!create_module` methods + to implement the :class:`~importlib.abc.Loader` interface. + A loader is typically returned by a :term:`finder`. + See also: * :ref:`finders-and-loaders` * :class:`importlib.abc.Loader` diff --git a/Doc/howto/mro.rst b/Doc/howto/mro.rst index 46db516e16dae4..0872bedcd3a2d3 100644 --- a/Doc/howto/mro.rst +++ b/Doc/howto/mro.rst @@ -398,7 +398,7 @@ with inheritance diagram We see that class G inherits from F and E, with F *before* E: therefore we would expect the attribute *G.remember2buy* to be inherited by -*F.rembermer2buy* and not by *E.remember2buy*: nevertheless Python 2.2 +*F.remember2buy* and not by *E.remember2buy*: nevertheless Python 2.2 gives >>> G.remember2buy # doctest: +SKIP diff --git a/Doc/library/__main__.rst b/Doc/library/__main__.rst index 647ff9da04d10d..4407ba2f7714dd 100644 --- a/Doc/library/__main__.rst +++ b/Doc/library/__main__.rst @@ -292,10 +292,7 @@ Here is an example module that consumes the ``__main__`` namespace:: if not did_user_define_their_name(): raise ValueError('Define the variable `my_name`!') - if '__file__' in dir(__main__): - print(__main__.my_name, "found in file", __main__.__file__) - else: - print(__main__.my_name) + print(__main__.my_name) Example usage of this module could be as follows:: @@ -330,7 +327,7 @@ status code 0, indicating success: .. code-block:: shell-session $ python start.py - Dinsdale found in file /path/to/start.py + Dinsdale Note that importing ``__main__`` doesn't cause any issues with unintentionally running top-level code meant for script use which is put in the @@ -361,8 +358,5 @@ defined in the REPL becomes part of the ``__main__`` scope:: >>> namely.print_user_name() Jabberwocky -Note that in this case the ``__main__`` scope doesn't contain a ``__file__`` -attribute as it's interactive. - The ``__main__`` scope is used in the implementation of :mod:`pdb` and :mod:`rlcompleter`. diff --git a/Doc/library/asyncio-eventloop.rst b/Doc/library/asyncio-eventloop.rst index ccb362d8c31ddf..15ef33e195904d 100644 --- a/Doc/library/asyncio-eventloop.rst +++ b/Doc/library/asyncio-eventloop.rst @@ -73,7 +73,7 @@ an event loop: Set *loop* as the current event loop for the current OS thread. - .. deprecated:: next + .. deprecated:: 3.14 The :func:`set_event_loop` function is deprecated and will be removed in Python 3.16. 
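Since :func:`set_event_loop` is slated for removal, a minimal sketch of a common replacement pattern, letting asyncio manage the loop lifecycle itself (one option, not the only one):

.. code-block:: python

   import asyncio

   async def main():
       await asyncio.sleep(0)

   # Let asyncio create, install and close the loop:
   asyncio.run(main())

   # Or, when several entry points must share one loop:
   with asyncio.Runner() as runner:
       runner.run(main())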
@@ -246,6 +246,9 @@ Scheduling callbacks another thread, this function *must* be used, since :meth:`call_soon` is not thread-safe. + This function is safe to be called from a reentrant context or signal handler, + however, it is not safe or fruitful to use the returned handle in such contexts. + Raises :exc:`RuntimeError` if called on a loop that's been closed. This can happen on a secondary thread when the main application is shutting down. @@ -389,9 +392,9 @@ Creating Futures and Tasks If *factory* is ``None`` the default task factory will be set. Otherwise, *factory* must be a *callable* with the signature matching - ``(loop, coro, context=None)``, where *loop* is a reference to the active + ``(loop, coro, **kwargs)``, where *loop* is a reference to the active event loop, and *coro* is a coroutine object. The callable - must return a :class:`asyncio.Future`-compatible object. + must pass on all *kwargs*, and return a :class:`asyncio.Task`-compatible object. .. method:: loop.get_task_factory() @@ -967,6 +970,9 @@ Watching file descriptors invoke *callback* with the specified arguments once *fd* is available for reading. + Any preexisting callback registered for *fd* is cancelled and replaced by + *callback*. + .. method:: loop.remove_reader(fd) Stop monitoring the *fd* file descriptor for read availability. Returns @@ -978,6 +984,9 @@ Watching file descriptors invoke *callback* with the specified arguments once *fd* is available for writing. + Any preexisting callback registered for *fd* is cancelled and replaced by + *callback*. + Use :func:`functools.partial` :ref:`to pass keyword arguments ` to *callback*. diff --git a/Doc/library/asyncio-graph.rst b/Doc/library/asyncio-graph.rst new file mode 100644 index 00000000000000..814bfe6e269bf9 --- /dev/null +++ b/Doc/library/asyncio-graph.rst @@ -0,0 +1,145 @@ +.. currentmodule:: asyncio + + +.. _asyncio-graph: + +======================== +Call Graph Introspection +======================== + +**Source code:** :source:`Lib/asyncio/graph.py` + +------------------------------------- + +asyncio has powerful runtime call graph introspection utilities +to trace the entire call graph of a running *coroutine* or *task*, or +a suspended *future*. These utilities and the underlying machinery +can be used from within a Python program or by external profilers +and debuggers. + +.. versionadded:: next + + +.. function:: print_call_graph(future=None, /, *, file=None, depth=1, limit=None) + + Print the async call graph for the current task or the provided + :class:`Task` or :class:`Future`. + + This function prints entries starting from the top frame and going + down towards the invocation point. + + The function receives an optional *future* argument. + If not passed, the current running task will be used. + + If the function is called on *the current task*, the optional + keyword-only *depth* argument can be used to skip the specified + number of frames from top of the stack. + + If the optional keyword-only *limit* argument is provided, each call stack + in the resulting graph is truncated to include at most ``abs(limit)`` + entries. If *limit* is positive, the entries left are the closest to + the invocation point. If *limit* is negative, the topmost entries are + left. If *limit* is omitted or ``None``, all entries are present. + If *limit* is ``0``, the call stack is not printed at all, only + "awaited by" information is printed. + + If *file* is omitted or ``None``, the function will print + to :data:`sys.stdout`. 
+ + **Example:** + + The following Python code: + + .. code-block:: python + + import asyncio + + async def test(): + asyncio.print_call_graph() + + async def main(): + async with asyncio.TaskGroup() as g: + g.create_task(test(), name='test') + + asyncio.run(main()) + + will print:: + + * Task(name='test', id=0x1039f0fe0) + + Call stack: + | File 't2.py', line 4, in async test() + + Awaited by: + * Task(name='Task-1', id=0x103a5e060) + + Call stack: + | File 'taskgroups.py', line 107, in async TaskGroup.__aexit__() + | File 't2.py', line 7, in async main() + +.. function:: format_call_graph(future=None, /, *, depth=1, limit=None) + + Like :func:`print_call_graph`, but returns a string. + If *future* is ``None`` and there's no current task, + the function returns an empty string. + + +.. function:: capture_call_graph(future=None, /, *, depth=1, limit=None) + + Capture the async call graph for the current task or the provided + :class:`Task` or :class:`Future`. + + The function receives an optional *future* argument. + If not passed, the current running task will be used. If there's no + current task, the function returns ``None``. + + If the function is called on *the current task*, the optional + keyword-only *depth* argument can be used to skip the specified + number of frames from top of the stack. + + Returns a ``FutureCallGraph`` data class object: + + * ``FutureCallGraph(future, call_stack, awaited_by)`` + + Where *future* is a reference to a :class:`Future` or + a :class:`Task` (or their subclasses.) + + ``call_stack`` is a tuple of ``FrameCallGraphEntry`` objects. + + ``awaited_by`` is a tuple of ``FutureCallGraph`` objects. + + * ``FrameCallGraphEntry(frame)`` + + Where *frame* is a frame object of a regular Python function + in the call stack. + + +Low level utility functions +=========================== + +To introspect an async call graph asyncio requires cooperation from +control flow structures, such as :func:`shield` or :class:`TaskGroup`. +Any time an intermediate :class:`Future` object with low-level APIs like +:meth:`Future.add_done_callback() ` is +involved, the following two functions should be used to inform asyncio +about how exactly such intermediate future objects are connected with +the tasks they wrap or control. + + +.. function:: future_add_to_awaited_by(future, waiter, /) + + Record that *future* is awaited on by *waiter*. + + Both *future* and *waiter* must be instances of + :class:`Future` or :class:`Task` or their subclasses, + otherwise the call would have no effect. + + A call to ``future_add_to_awaited_by()`` must be followed by an + eventual call to the :func:`future_discard_from_awaited_by` function + with the same arguments. + + +.. function:: future_discard_from_awaited_by(future, waiter, /) + + Record that *future* is no longer awaited on by *waiter*. + + Both *future* and *waiter* must be instances of + :class:`Future` or :class:`Task` or their subclasses, otherwise + the call would have no effect. diff --git a/Doc/library/asyncio-policy.rst b/Doc/library/asyncio-policy.rst index 9f86234ce941d1..57f964912dd6ea 100644 --- a/Doc/library/asyncio-policy.rst +++ b/Doc/library/asyncio-policy.rst @@ -48,7 +48,7 @@ for the current process: Return the current process-wide policy. - .. deprecated:: next + .. deprecated:: 3.14 The :func:`get_event_loop_policy` function is deprecated and will be removed in Python 3.16. @@ -58,7 +58,7 @@ for the current process: If *policy* is set to ``None``, the default policy is restored. - .. deprecated:: next + .. 
deprecated:: 3.14 The :func:`set_event_loop_policy` function is deprecated and will be removed in Python 3.16. @@ -95,7 +95,7 @@ The abstract event loop policy base class is defined as follows: This method should never return ``None``. - .. deprecated:: next + .. deprecated:: 3.14 The :class:`AbstractEventLoopPolicy` class is deprecated and will be removed in Python 3.16. @@ -121,7 +121,7 @@ asyncio ships with the following built-in policies: The :meth:`get_event_loop` method of the default asyncio policy now raises a :exc:`RuntimeError` if there is no set event loop. - .. deprecated:: next + .. deprecated:: 3.14 The :class:`DefaultEventLoopPolicy` class is deprecated and will be removed in Python 3.16. @@ -133,7 +133,7 @@ asyncio ships with the following built-in policies: .. availability:: Windows. - .. deprecated:: next + .. deprecated:: 3.14 The :class:`WindowsSelectorEventLoopPolicy` class is deprecated and will be removed in Python 3.16. @@ -145,7 +145,7 @@ asyncio ships with the following built-in policies: .. availability:: Windows. - .. deprecated:: next + .. deprecated:: 3.14 The :class:`WindowsProactorEventLoopPolicy` class is deprecated and will be removed in Python 3.16. diff --git a/Doc/library/asyncio-queue.rst b/Doc/library/asyncio-queue.rst index 61991bf2f4ed1d..066edd424d150e 100644 --- a/Doc/library/asyncio-queue.rst +++ b/Doc/library/asyncio-queue.rst @@ -115,11 +115,11 @@ Queue .. method:: task_done() - Indicate that a formerly enqueued task is complete. + Indicate that a formerly enqueued work item is complete. Used by queue consumers. For each :meth:`~Queue.get` used to - fetch a task, a subsequent call to :meth:`task_done` tells the - queue that the processing on the task is complete. + fetch a work item, a subsequent call to :meth:`task_done` tells the + queue that the processing on the work item is complete. If a :meth:`join` is currently blocking, it will resume when all items have been processed (meaning that a :meth:`task_done` diff --git a/Doc/library/asyncio.rst b/Doc/library/asyncio.rst index 5f83b3a2658da4..7d368dae49dc1d 100644 --- a/Doc/library/asyncio.rst +++ b/Doc/library/asyncio.rst @@ -99,6 +99,7 @@ You can experiment with an ``asyncio`` concurrent context in the :term:`REPL`: asyncio-subprocess.rst asyncio-queue.rst asyncio-exceptions.rst + asyncio-graph.rst .. toctree:: :caption: Low-level APIs diff --git a/Doc/library/calendar.rst b/Doc/library/calendar.rst index ace8529d6e7e0c..1c6b5e03af3560 100644 --- a/Doc/library/calendar.rst +++ b/Doc/library/calendar.rst @@ -38,13 +38,33 @@ interpreted as prescribed by the ISO 8601 standard. Year 0 is 1 BC, year -1 is itself. This is the job of subclasses. - :class:`Calendar` instances have the following methods: + :class:`Calendar` instances have the following methods and attributes: + + .. attribute:: firstweekday + + The first weekday as an integer (0--6). + + This property can also be set and read using + :meth:`~Calendar.setfirstweekday` and + :meth:`~Calendar.getfirstweekday` respectively. + + .. method:: getfirstweekday() + + Return an :class:`int` for the current first weekday (0--6). + + Identical to reading the :attr:`~Calendar.firstweekday` property. + + .. method:: setfirstweekday(firstweekday) + + Set the first weekday to *firstweekday*, passed as an :class:`int` (0--6) + + Identical to setting the :attr:`~Calendar.firstweekday` property. .. method:: iterweekdays() Return an iterator for the week day numbers that will be used for one week. 
The first value from the iterator will be the same as the value of - the :attr:`firstweekday` property. + the :attr:`~Calendar.firstweekday` property. .. method:: itermonthdates(year, month) @@ -153,7 +173,7 @@ interpreted as prescribed by the ISO 8601 standard. Year 0 is 1 BC, year -1 is on the first weekday as specified in the constructor or set by the :meth:`setfirstweekday` method. - .. versionchanged:: next + .. versionchanged:: 3.14 If *highlight_day* is given, this date is highlighted in color. This can be :ref:`controlled using environment variables `. @@ -181,7 +201,7 @@ interpreted as prescribed by the ISO 8601 standard. Year 0 is 1 BC, year -1 is on the first weekday as specified in the constructor or set by the :meth:`setfirstweekday` method. - .. versionchanged:: next + .. versionchanged:: 3.14 If *highlight_day* is given, this date is highlighted in color. This can be :ref:`controlled using environment variables `. @@ -209,7 +229,7 @@ interpreted as prescribed by the ISO 8601 standard. Year 0 is 1 BC, year -1 is :meth:`setfirstweekday` method. The earliest year for which a calendar can be generated is platform-dependent. - .. versionchanged:: next + .. versionchanged:: 3.14 If *highlight_day* is given, this date is highlighted in color. This can be :ref:`controlled using environment variables `. @@ -707,7 +727,7 @@ The following options are accepted: The number of months printed per row. Defaults to 3. -.. versionchanged:: next +.. versionchanged:: 3.14 By default, today's date is highlighted in color and can be :ref:`controlled using environment variables `. diff --git a/Doc/library/ctypes.rst b/Doc/library/ctypes.rst index 398cb92bac809a..615138302e1379 100644 --- a/Doc/library/ctypes.rst +++ b/Doc/library/ctypes.rst @@ -266,8 +266,8 @@ Fundamental data types (1) The constructor accepts any object with a truth value. -Additionally, if IEC 60559 compatible complex arithmetic (Annex G) is supported, the following -complex types are available: +Additionally, if IEC 60559 compatible complex arithmetic (Annex G) is supported +in both C and ``libffi``, the following complex types are available: +----------------------------------+---------------------------------+-----------------+ | ctypes type | C type | Python type | @@ -870,6 +870,36 @@ invalid non-\ ``NULL`` pointers would crash Python):: ValueError: NULL pointer access >>> +.. _ctypes-thread-safety: + +Thread safety without the GIL +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +In Python 3.13, the :term:`GIL` may be disabled on :term:`experimental free threaded ` builds. +In ctypes, reads and writes to a single object concurrently is safe, but not across multiple objects: + + .. code-block:: pycon + + >>> number = c_int(42) + >>> pointer_a = pointer(number) + >>> pointer_b = pointer(number) + +In the above, it's only safe for one object to read and write to the address at once if the GIL is disabled. +So, ``pointer_a`` can be shared and written to across multiple threads, but only if ``pointer_b`` +is not also attempting to do the same. If this is an issue, consider using a :class:`threading.Lock` +to synchronize access to memory: + + .. code-block:: pycon + + >>> import threading + >>> lock = threading.Lock() + >>> # Thread 1 + >>> with lock: + ... pointer_a.contents = 24 + >>> # Thread 2 + >>> with lock: + ... pointer_b.contents = 42 + .. 
_ctypes-type-conversions: @@ -1812,6 +1842,8 @@ different ways, depending on the type and number of the parameters in the call: the COM interface as first argument, in addition to those parameters that are specified in the :attr:`!argtypes` tuple. + .. availability:: Windows + The optional *paramflags* parameter creates foreign function wrappers with much more functionality than the features described above. @@ -2201,7 +2233,7 @@ Utility functions .. audit-event:: ctypes.memoryview_at address,size,readonly - .. versionadded:: next + .. versionadded:: 3.14 .. _ctypes-data-types: diff --git a/Doc/library/email.contentmanager.rst b/Doc/library/email.contentmanager.rst index a86e227429b06d..b33fe82a6e4c9f 100644 --- a/Doc/library/email.contentmanager.rst +++ b/Doc/library/email.contentmanager.rst @@ -157,7 +157,13 @@ Currently the email package provides only one concrete content manager, :exc:`ValueError`. * For ``str`` objects, if *cte* is not set use heuristics to - determine the most compact encoding. + determine the most compact encoding. Prior to encoding, + :meth:`str.splitlines` is used to normalize all line boundaries, + ensuring that each line of the payload is terminated by the + current policy's :data:`~email.policy.Policy.linesep` property + (even if the original string did not end with one). + * For ``bytes`` objects, *cte* is taken to be base64 if not set, + and the aforementioned newline translation is not performed. * For :class:`~email.message.EmailMessage`, per :rfc:`2046`, raise an error if a *cte* of ``quoted-printable`` or ``base64`` is requested for *subtype* ``rfc822``, and for any *cte* other than diff --git a/Doc/library/errno.rst b/Doc/library/errno.rst index d8033663ea8eac..48b9762d85c2e1 100644 --- a/Doc/library/errno.rst +++ b/Doc/library/errno.rst @@ -672,6 +672,171 @@ defined by the module. The specific list of defined symbols is available as .. versionadded:: 3.11 + +.. data:: ENOMEDIUM + + No medium found + + +.. data:: EMEDIUMTYPE + + Wrong medium type + + +.. data:: ENOKEY + + Required key not available + + +.. data:: EKEYEXPIRED + + Key has expired + + +.. data:: EKEYREVOKED + + Key has been revoked + + +.. data:: EKEYREJECTED + + Key was rejected by service + + +.. data:: ERFKILL + + Operation not possible due to RF-kill + + +.. data:: ELOCKUNMAPPED + + Locked lock was unmapped + + +.. data:: ENOTACTIVE + + Facility is not active + + +.. data:: EAUTH + + Authentication error + + .. versionadded:: 3.2 + + +.. data:: EBADARCH + + Bad CPU type in executable + + .. versionadded:: 3.2 + + +.. data:: EBADEXEC + + Bad executable (or shared library) + + .. versionadded:: 3.2 + + +.. data:: EBADMACHO + + Malformed Mach-o file + + .. versionadded:: 3.2 + + +.. data:: EDEVERR + + Device error + + .. versionadded:: 3.2 + + +.. data:: EFTYPE + + Inappropriate file type or format + + .. versionadded:: 3.2 + + +.. data:: ENEEDAUTH + + Need authenticator + + .. versionadded:: 3.2 + + +.. data:: ENOATTR + + Attribute not found + + .. versionadded:: 3.2 + + +.. data:: ENOPOLICY + + Policy not found + + .. versionadded:: 3.2 + + +.. data:: EPROCLIM + + Too many processes + + .. versionadded:: 3.2 + + +.. data:: EPROCUNAVAIL + + Bad procedure for program + + .. versionadded:: 3.2 + + +.. data:: EPROGMISMATCH + + Program version wrong + + .. versionadded:: 3.2 + + +.. data:: EPROGUNAVAIL + + RPC prog. not avail + + .. versionadded:: 3.2 + + +.. data:: EPWROFF + + Device power is off + + .. versionadded:: 3.2 + + +.. data:: EBADRPC + + RPC struct is bad + + .. 
versionadded:: 3.2 + + +.. data:: ERPCMISMATCH + + RPC version wrong + + .. versionadded:: 3.2 + + +.. data:: ESHLIBVERS + + Shared library version mismatch + + .. versionadded:: 3.2 + + .. data:: ENOTCAPABLE Capabilities insufficient. This error is mapped to the exception diff --git a/Doc/library/exceptions.rst b/Doc/library/exceptions.rst index f72b11e34c5c3d..319d261ef3fb4d 100644 --- a/Doc/library/exceptions.rst +++ b/Doc/library/exceptions.rst @@ -562,9 +562,13 @@ The following exceptions are the exceptions that are usually raised. Raised when the interpreter finds an internal error, but the situation does not look so serious to cause it to abandon all hope. The associated value is a - string indicating what went wrong (in low-level terms). + string indicating what went wrong (in low-level terms). In :term:`CPython`, + this could be raised by incorrectly using Python's C API, such as returning + a ``NULL`` value without an exception set. - You should report this to the author or maintainer of your Python interpreter. + If you're confident that this exception wasn't your fault, or the fault of + a package you're using, you should report this to the author or maintainer + of your Python interpreter. Be sure to report the version of the Python interpreter (``sys.version``; it is also printed at the start of an interactive Python session), the exact error message (the exception's associated value) and if possible the source of the diff --git a/Doc/library/faulthandler.rst b/Doc/library/faulthandler.rst index b81da4af3cff58..b7df9f6b9bcf96 100644 --- a/Doc/library/faulthandler.rst +++ b/Doc/library/faulthandler.rst @@ -91,7 +91,7 @@ Fault handler state The dump now mentions if a garbage collector collection is running if *all_threads* is true. - .. versionchanged:: next + .. versionchanged:: 3.14 Only the current thread is dumped if the :term:`GIL` is disabled to prevent the risk of data races. diff --git a/Doc/library/fnmatch.rst b/Doc/library/fnmatch.rst index fda44923f204fc..5cb47777ae527d 100644 --- a/Doc/library/fnmatch.rst +++ b/Doc/library/fnmatch.rst @@ -46,9 +46,15 @@ module. See module :mod:`glob` for pathname expansion (:mod:`glob` uses a period are not special for this module, and are matched by the ``*`` and ``?`` patterns. -Also note that :func:`functools.lru_cache` with the *maxsize* of 32768 is used to -cache the compiled regex patterns in the following functions: :func:`fnmatch`, -:func:`fnmatchcase`, :func:`.filter`. +Unless stated otherwise, "filename string" and "pattern string" either refer to +:class:`str` or ``ISO-8859-1`` encoded :class:`bytes` objects. Note that the +functions documented below do not allow to mix a :class:`!bytes` pattern with +a :class:`!str` filename, and vice-versa. + +Finally, note that :func:`functools.lru_cache` with a *maxsize* of 32768 +is used to cache the (typed) compiled regex patterns in the following +functions: :func:`fnmatch`, :func:`fnmatchcase`, :func:`.filter`. + .. function:: fnmatch(name, pat) @@ -78,8 +84,8 @@ cache the compiled regex patterns in the following functions: :func:`fnmatch`, .. function:: filter(names, pat) - Construct a list from those elements of the :term:`iterable` *names* - that match pattern *pat*. + Construct a list from those elements of the :term:`iterable` of filename + strings *names* that match the pattern string *pat*. It is the same as ``[n for n in names if fnmatch(n, pat)]``, but implemented more efficiently. 
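A short sketch of :func:`.filter` with made-up file names:

.. code-block:: python

   import fnmatch

   names = ["main.py", "notes.txt", "test_api.py", "README"]
   # Equivalent to [n for n in names if fnmatch.fnmatch(n, "*.py")]
   print(fnmatch.filter(names, "*.py"))   # ['main.py', 'test_api.py']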
@@ -87,7 +93,7 @@ cache the compiled regex patterns in the following functions: :func:`fnmatch`, .. function:: translate(pat) Return the shell-style pattern *pat* converted to a regular expression for - using with :func:`re.match`. + using with :func:`re.match`. The pattern is expected to be a :class:`str`. Example: diff --git a/Doc/library/functools.rst b/Doc/library/functools.rst index 69d9d81c848124..8ad5f48c9e5286 100644 --- a/Doc/library/functools.rst +++ b/Doc/library/functools.rst @@ -368,8 +368,8 @@ The :mod:`functools` module defines the following functions: If :data:`Placeholder` sentinels are present in *args*, they will be filled first when :func:`!partial` is called. This makes it possible to pre-fill any positional - argument with a call to :func:`!partial`; without :data:`!Placeholder`, only the - first positional argument can be pre-filled. + argument with a call to :func:`!partial`; without :data:`!Placeholder`, + only the chosen number of leading positional arguments can be pre-filled. If any :data:`!Placeholder` sentinels are present, all must be filled at call time: diff --git a/Doc/library/http.cookies.rst b/Doc/library/http.cookies.rst index ad37a0fca4742d..eb196320721194 100644 --- a/Doc/library/http.cookies.rst +++ b/Doc/library/http.cookies.rst @@ -142,6 +142,7 @@ Morsel Objects version httponly samesite + partitioned The attribute :attr:`httponly` specifies that the cookie is only transferred in HTTP requests, and is not accessible through JavaScript. This is intended @@ -151,6 +152,19 @@ Morsel Objects send the cookie along with cross-site requests. This helps to mitigate CSRF attacks. Valid values for this attribute are "Strict" and "Lax". + The attribute :attr:`partitioned` indicates to user agents that these + cross-site cookies *should* only be available in the same top-level context + that the cookie was first set in. For this to be accepted by the user agent, + you **must** also set ``Secure``. + + In addition, it is recommended to use the ``__Host`` prefix when setting + partitioned cookies to make them bound to the hostname and not the + registrable domain. Read + `CHIPS (Cookies Having Independent Partitioned State)`_ + for full details and examples. + + .. _CHIPS (Cookies Having Independent Partitioned State): https://github.com/privacycg/CHIPS/blob/main/README.md + The keys are case-insensitive and their default value is ``''``. .. versionchanged:: 3.5 @@ -165,6 +179,9 @@ Morsel Objects .. versionchanged:: 3.8 Added support for the :attr:`samesite` attribute. + .. versionchanged:: 3.14 + Added support for the :attr:`partitioned` attribute. + .. attribute:: Morsel.value diff --git a/Doc/library/importlib.resources.abc.rst b/Doc/library/importlib.resources.abc.rst index 54995ddbfbca12..4085bdf6598d98 100644 --- a/Doc/library/importlib.resources.abc.rst +++ b/Doc/library/importlib.resources.abc.rst @@ -43,7 +43,7 @@ :const:`None`. An object compatible with this ABC should only be returned when the specified module is a package. - .. deprecated-removed:: 3.12 3.14 + .. deprecated:: 3.12 Use :class:`importlib.resources.abc.TraversableResources` instead. .. abstractmethod:: open_resource(resource) diff --git a/Doc/library/importlib.rst b/Doc/library/importlib.rst index 9e088a598a6c08..b935fc0e42a4bd 100644 --- a/Doc/library/importlib.rst +++ b/Doc/library/importlib.rst @@ -380,13 +380,15 @@ ABC hierarchy:: .. 
class:: ResourceLoader + *Superseded by TraversableResources* + An abstract base class for a :term:`loader` which implements the optional :pep:`302` protocol for loading arbitrary resources from the storage back-end. .. deprecated:: 3.7 This ABC is deprecated in favour of supporting resource loading - through :class:`importlib.resources.abc.ResourceReader`. + through :class:`importlib.resources.abc.TraversableResources`. .. abstractmethod:: get_data(path) diff --git a/Doc/library/inspect.rst b/Doc/library/inspect.rst index 0085207d3055f2..544efed1a76b96 100644 --- a/Doc/library/inspect.rst +++ b/Doc/library/inspect.rst @@ -150,6 +150,12 @@ attributes (see :ref:`import-mod-attrs` for module attributes): | | f_locals | local namespace seen by | | | | this frame | +-----------------+-------------------+---------------------------+ +| | f_generator | returns the generator or | +| | | coroutine object that | +| | | owns this frame, or | +| | | ``None`` if the frame is | +| | | of a regular function | ++-----------------+-------------------+---------------------------+ | | f_trace | tracing function for this | | | | frame, or ``None`` | +-----------------+-------------------+---------------------------+ @@ -310,6 +316,10 @@ attributes (see :ref:`import-mod-attrs` for module attributes): Add ``__builtins__`` attribute to functions. +.. versionchanged:: next + + Add ``f_generator`` attribute to frames. + .. function:: getmembers(object[, predicate]) Return all the members of an object in a list of ``(name, value)`` diff --git a/Doc/library/json.rst b/Doc/library/json.rst index 169291f74f44a5..4e7046d6d8f6ac 100644 --- a/Doc/library/json.rst +++ b/Doc/library/json.rst @@ -258,36 +258,86 @@ Basic Usage the original one. That is, ``loads(dumps(x)) != x`` if x has non-string keys. -.. function:: load(fp, *, cls=None, object_hook=None, parse_float=None, parse_int=None, parse_constant=None, object_pairs_hook=None, **kw) +.. function:: load(fp, *, cls=None, object_hook=None, parse_float=None, \ + parse_int=None, parse_constant=None, \ + object_pairs_hook=None, **kw) - Deserialize *fp* (a ``.read()``-supporting :term:`text file` or - :term:`binary file` containing a JSON document) to a Python object using - this :ref:`conversion table `. + Deserialize *fp* to a Python object + using the :ref:`JSON-to-Python conversion table `. - *object_hook* is an optional function that will be called with the result of - any object literal decoded (a :class:`dict`). The return value of - *object_hook* will be used instead of the :class:`dict`. This feature can - be used to implement custom decoders (e.g. `JSON-RPC - `_ class hinting). + :param fp: + A ``.read()``-supporting :term:`text file` or :term:`binary file` + containing the JSON document to be deserialized. + :type fp: :term:`file-like object` - *object_pairs_hook* is an optional function that will be called with the - result of any object literal decoded with an ordered list of pairs. The - return value of *object_pairs_hook* will be used instead of the - :class:`dict`. This feature can be used to implement custom decoders. If - *object_hook* is also defined, the *object_pairs_hook* takes priority. + :param cls: + If set, a custom JSON decoder. + Additional keyword arguments to :func:`!load` + will be passed to the constructor of *cls*. + If ``None`` (the default), :class:`!JSONDecoder` is used. 
+ :type cls: a :class:`JSONDecoder` subclass + + :param object_hook: + If set, a function that is called with the result of + any object literal decoded (a :class:`dict`). + The return value of this function will be used + instead of the :class:`dict`. + This feature can be used to implement custom decoders, + for example `JSON-RPC `_ class hinting. + Default ``None``. + :type object_hook: :term:`callable` | None + + :param object_pairs_hook: + If set, a function that is called with the result of + any object literal decoded with an ordered list of pairs. + The return value of this function will be used + instead of the :class:`dict`. + This feature can be used to implement custom decoders. + If *object_hook* is also set, *object_pairs_hook* takes priority. + Default ``None``. + :type object_pairs_hook: :term:`callable` | None + + :param parse_float: + If set, a function that is called with + the string of every JSON float to be decoded. + If ``None`` (the default), it is equivalent to ``float(num_str)``. + This can be used to parse JSON floats into custom datatypes, + for example :class:`decimal.Decimal`. + :type parse_float: :term:`callable` | None + + :param parse_int: + If set, a function that is called with + the string of every JSON int to be decoded. + If ``None`` (the default), it is equivalent to ``int(num_str)``. + This can be used to parse JSON integers into custom datatypes, + for example :class:`float`. + :type parse_int: :term:`callable` | None + + :param parse_constant: + If set, a function that is called with one of the following strings: + ``'-Infinity'``, ``'Infinity'``, or ``'NaN'``. + This can be used to raise an exception + if invalid JSON numbers are encountered. + Default ``None``. + :type parse_constant: :term:`callable` | None + + :raises JSONDecodeError: + When the data being deserialized is not a valid JSON document. + + :raises UnicodeDecodeError: + When the data being deserialized does not contain + UTF-8, UTF-16 or UTF-32 encoded data. .. versionchanged:: 3.1 - Added support for *object_pairs_hook*. - *parse_float* is an optional function that will be called with the string of - every JSON float to be decoded. By default, this is equivalent to - ``float(num_str)``. This can be used to use another datatype or parser for - JSON floats (e.g. :class:`decimal.Decimal`). + * Added the optional *object_pairs_hook* parameter. + * *parse_constant* doesn't get called on 'null', 'true', 'false' anymore. - *parse_int* is an optional function that will be called with the string of - every JSON int to be decoded. By default, this is equivalent to - ``int(num_str)``. This can be used to use another datatype or parser for - JSON integers (e.g. :class:`float`). + .. versionchanged:: 3.6 + + * All optional parameters are now :ref:`keyword-only `. + * *fp* can now be a :term:`binary file`. + The input encoding should be UTF-8, UTF-16 or UTF-32. .. versionchanged:: 3.11 The default *parse_int* of :func:`int` now limits the maximum length of @@ -295,38 +345,13 @@ Basic Usage conversion length limitation ` to help avoid denial of service attacks. - *parse_constant* is an optional function that will be called with one of the - following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This can be - used to raise an exception if invalid JSON numbers are encountered. - - .. versionchanged:: 3.1 - *parse_constant* doesn't get called on 'null', 'true', 'false' anymore. 
- - To use a custom :class:`JSONDecoder` subclass, specify it with the ``cls`` - kwarg; otherwise :class:`JSONDecoder` is used. Additional keyword arguments - will be passed to the constructor of the class. - - If the data being deserialized is not a valid JSON document, a - :exc:`JSONDecodeError` will be raised. - - .. versionchanged:: 3.6 - All optional parameters are now :ref:`keyword-only `. - - .. versionchanged:: 3.6 - *fp* can now be a :term:`binary file`. The input encoding should be - UTF-8, UTF-16 or UTF-32. - .. function:: loads(s, *, cls=None, object_hook=None, parse_float=None, parse_int=None, parse_constant=None, object_pairs_hook=None, **kw) - Deserialize *s* (a :class:`str`, :class:`bytes` or :class:`bytearray` + Identical to :func:`load`, but instead of a file-like object, + deserialize *s* (a :class:`str`, :class:`bytes` or :class:`bytearray` instance containing a JSON document) to a Python object using this :ref:`conversion table `. - The other arguments have the same meaning as in :func:`load`. - - If the data being deserialized is not a valid JSON document, a - :exc:`JSONDecodeError` will be raised. - .. versionchanged:: 3.6 *s* can now be of type :class:`bytes` or :class:`bytearray`. The input encoding should be UTF-8, UTF-16 or UTF-32. diff --git a/Doc/library/logging.handlers.rst b/Doc/library/logging.handlers.rst index 5a081f9e7add99..ffb54591b3563b 100644 --- a/Doc/library/logging.handlers.rst +++ b/Doc/library/logging.handlers.rst @@ -613,7 +613,7 @@ The :class:`SysLogHandler` class, located in the :mod:`logging.handlers` module, supports sending logging messages to a remote or local Unix syslog. -.. class:: SysLogHandler(address=('localhost', SYSLOG_UDP_PORT), facility=LOG_USER, socktype=socket.SOCK_DGRAM) +.. class:: SysLogHandler(address=('localhost', SYSLOG_UDP_PORT), facility=LOG_USER, socktype=socket.SOCK_DGRAM, timeout=None) Returns a new instance of the :class:`SysLogHandler` class intended to communicate with a remote Unix machine whose address is given by *address* in @@ -626,6 +626,11 @@ supports sending logging messages to a remote or local Unix syslog. *socktype* argument, which defaults to :const:`socket.SOCK_DGRAM` and thus opens a UDP socket. To open a TCP socket (for use with the newer syslog daemons such as rsyslog), specify a value of :const:`socket.SOCK_STREAM`. + If *timeout* is specified, it sets a timeout (in seconds) for the socket operations. + This can help prevent the program from hanging indefinitely if the syslog server is + unreachable. By default, *timeout* is ``None``, meaning no timeout is applied. + + Note that if your server is not listening on UDP port 514, :class:`SysLogHandler` may appear not to work. In that case, check what @@ -645,6 +650,8 @@ supports sending logging messages to a remote or local Unix syslog. .. versionchanged:: 3.2 *socktype* was added. + .. versionchanged:: 3.14 + *timeout* was added. .. method:: close() diff --git a/Doc/library/os.rst b/Doc/library/os.rst index 69e6192038ab2b..7d3596622862ea 100644 --- a/Doc/library/os.rst +++ b/Doc/library/os.rst @@ -1659,6 +1659,33 @@ or `the MSDN `_ on Windo :exc:`InterruptedError` exception (see :pep:`475` for the rationale). +.. function:: readinto(fd, buffer, /) + + Read from a file descriptor *fd* into a mutable + :ref:`buffer object ` *buffer*. + + The *buffer* should be mutable and :term:`bytes-like `. On + success, returns the number of bytes read. Less bytes may be read than the + size of the buffer. 
The underlying system call will be retried when + interrupted by a signal, unless the signal handler raises an exception. + Other errors will not be retried and an error will be raised. + + Returns 0 if *fd* is at end of file or if the provided *buffer* has + length 0 (which can be used to check for errors without reading data). + Never returns negative. + + .. note:: + + This function is intended for low-level I/O and must be applied to a file + descriptor as returned by :func:`os.open` or :func:`os.pipe`. To read a + "file object" returned by the built-in function :func:`open`, or + :data:`sys.stdin`, use its member functions, for example + :meth:`io.BufferedIOBase.readinto`, :meth:`io.BufferedIOBase.read`, or + :meth:`io.TextIOBase.read` + + .. versionadded:: next + + .. function:: sendfile(out_fd, in_fd, offset, count) sendfile(out_fd, in_fd, offset, count, headers=(), trailers=(), flags=0) @@ -5411,6 +5438,8 @@ information, consult your Unix manpages. The following scheduling policies are exposed if they are supported by the operating system. +.. _os-scheduling-policy: + .. data:: SCHED_OTHER The default scheduling policy. @@ -5424,7 +5453,7 @@ operating system. Scheduling policy for tasks with deadline constraints. - .. versionadded:: next + .. versionadded:: 3.14 .. data:: SCHED_IDLE @@ -5434,7 +5463,7 @@ operating system. Alias for :data:`SCHED_OTHER`. - .. versionadded:: next + .. versionadded:: 3.14 .. data:: SCHED_SPORADIC @@ -5514,7 +5543,7 @@ operating system. .. function:: sched_yield() - Voluntarily relinquish the CPU. + Voluntarily relinquish the CPU. See :manpage:`sched_yield(2)` for details. .. function:: sched_setaffinity(pid, mask, /) diff --git a/Doc/library/pdb.rst b/Doc/library/pdb.rst index 6c099b22b38c21..f9d1213fb6d29d 100644 --- a/Doc/library/pdb.rst +++ b/Doc/library/pdb.rst @@ -179,13 +179,15 @@ slightly different way: .. versionadded:: 3.14 The *commands* argument. -.. function:: post_mortem(traceback=None) +.. function:: post_mortem(t=None) - Enter post-mortem debugging of the given *traceback* object. If no - *traceback* is given, it uses the one of the exception that is currently - being handled (an exception must be being handled if the default is to be - used). + Enter post-mortem debugging of the given exception or + :ref:`traceback object `. If no value is given, it uses + the exception that is currently being handled, or raises ``ValueError`` if + there isn’t one. + .. versionchanged:: 3.13 + Support for exception objects was added. .. function:: pm() diff --git a/Doc/library/pyexpat.rst b/Doc/library/pyexpat.rst index 0f3b58ef6ea5af..2d57cff10a9278 100644 --- a/Doc/library/pyexpat.rst +++ b/Doc/library/pyexpat.rst @@ -945,7 +945,7 @@ The ``errors`` module has the following attributes: The parser was tried to be stopped or suspended before it started. - .. versionadded:: next + .. versionadded:: 3.14 .. rubric:: Footnotes diff --git a/Doc/library/re.rst b/Doc/library/re.rst index 29387a429b844c..e2a78dc95d4ae1 100644 --- a/Doc/library/re.rst +++ b/Doc/library/re.rst @@ -572,7 +572,7 @@ character ``'$'``. Word boundaries are determined by the current locale if the :py:const:`~re.LOCALE` flag is used. - .. versionchanged:: next + .. versionchanged:: 3.14 ``\B`` now matches empty input string. .. 
index:: single: \d; in regular expressions
diff --git a/Doc/library/shutil.rst b/Doc/library/shutil.rst
index 2a8592f8bd69c1..06800c4588b663 100644
--- a/Doc/library/shutil.rst
+++ b/Doc/library/shutil.rst
@@ -512,7 +512,9 @@ the use of userspace buffers in Python as in "``outfd.write(infd.read())``".
 On macOS `fcopyfile`_ is used to copy the file content (not metadata).
-On Linux and Solaris :func:`os.sendfile` is used.
+On Linux :func:`os.copy_file_range` or :func:`os.sendfile` is used.
+
+On Solaris :func:`os.sendfile` is used.
 On Windows :func:`shutil.copyfile` uses a bigger default buffer size (1 MiB
 instead of 64 KiB) and a :func:`memoryview`-based variant of
@@ -527,6 +529,10 @@ file then shutil will silently fallback on using less efficient
 .. versionchanged:: 3.14
    Solaris now uses :func:`os.sendfile`.
+.. versionchanged:: next
+   Copy-on-write or server-side copy may be used internally via
+   :func:`os.copy_file_range` on supported Linux filesystems.
+
 .. _shutil-copytree-example:
 copytree example
diff --git a/Doc/library/socket.rst b/Doc/library/socket.rst
index 8ba2bd1dcce8cc..b36acad29ecb00 100644
--- a/Doc/library/socket.rst
+++ b/Doc/library/socket.rst
@@ -678,7 +678,7 @@ Constants
   Constant to enable duplicate address and port bindings with load balancing.
-  .. versionadded:: next
+  .. versionadded:: 3.14
   .. availability:: FreeBSD >= 12.0
diff --git a/Doc/library/ssl.rst b/Doc/library/ssl.rst
index 9d7b6aa66cd443..37ea32dc8a56e5 100644
--- a/Doc/library/ssl.rst
+++ b/Doc/library/ssl.rst
@@ -938,7 +938,7 @@ Constants
   Whether the OpenSSL library has built-in support for TLS-PHA.
-  .. versionadded:: next
+  .. versionadded:: 3.14
 .. data:: CHANNEL_BINDING_TYPES
diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst
index 191827526e890f..6050784264707b 100644
--- a/Doc/library/stdtypes.rst
+++ b/Doc/library/stdtypes.rst
@@ -1548,6 +1548,100 @@ objects that compare equal might have different :attr:`~range.start`,
   single: str (built-in class); (see also string)
   pair: object; string
+.. _text-methods-summary:
+
+Text and Binary Sequence Type Methods Summary
+=============================================
+The following table summarizes the methods of the text and binary sequence
+types by category.
+ + ++--------------------------+-------------------------------------------+---------------------------------------------------+ +| Category | :class:`str` methods | :class:`bytes` and :class:`bytearray` methods | ++==========================+===========================================+===================================================+ +| Formatting | :meth:`str.format` | | +| +-------------------------------------------+---------------------------------------------------+ +| | :meth:`str.format_map` | | +| +-------------------------------------------+---------------------------------------------------+ +| | :ref:`f-strings` | | +| +-------------------------------------------+---------------------------------------------------+ +| | :ref:`old-string-formatting` | :ref:`bytes-formatting` | ++--------------------------+------------------+------------------------+--------------------+------------------------------+ +| Searching and Replacing | :meth:`str.find` | :meth:`str.rfind` | :meth:`bytes.find` | :meth:`bytes.rfind` | +| +------------------+------------------------+--------------------+------------------------------+ +| | :meth:`str.index`| :meth:`str.rindex` | :meth:`bytes.index`| :meth:`bytes.rindex` | +| +------------------+------------------------+--------------------+------------------------------+ +| | :meth:`str.startswith` | :meth:`bytes.startswith` | +| +-------------------------------------------+---------------------------------------------------+ +| | :meth:`str.endswith` | :meth:`bytes.endswith` | +| +-------------------------------------------+---------------------------------------------------+ +| | :meth:`str.count` | :meth:`bytes.count` | +| +-------------------------------------------+---------------------------------------------------+ +| | :meth:`str.replace` | :meth:`bytes.replace` | ++--------------------------+-------------------+-----------------------+---------------------+-----------------------------+ +| Splitting and Joining | :meth:`str.split` | :meth:`str.rsplit` | :meth:`bytes.split` | :meth:`bytes.rsplit` | +| +-------------------+-----------------------+---------------------+-----------------------------+ +| | :meth:`str.splitlines` | :meth:`bytes.splitlines` | +| +-------------------------------------------+---------------------------------------------------+ +| | :meth:`str.partition` | :meth:`bytes.partition` | +| +-------------------------------------------+---------------------------------------------------+ +| | :meth:`str.rpartition` | :meth:`bytes.rpartition` | +| +-------------------------------------------+---------------------------------------------------+ +| | :meth:`str.join` | :meth:`bytes.join` | ++--------------------------+-------------------------------------------+---------------------------------------------------+ +| String Classification | :meth:`str.isalpha` | :meth:`bytes.isalpha` | +| +-------------------------------------------+---------------------------------------------------+ +| | :meth:`str.isdecimal` | | +| +-------------------------------------------+---------------------------------------------------+ +| | :meth:`str.isdigit` | :meth:`bytes.isdigit` | +| +-------------------------------------------+---------------------------------------------------+ +| | :meth:`str.isnumeric` | | +| +-------------------------------------------+---------------------------------------------------+ +| | :meth:`str.isalnum` | :meth:`bytes.isalnum` | +| 
+-------------------------------------------+---------------------------------------------------+ +| | :meth:`str.isidentifier` | | +| +-------------------------------------------+---------------------------------------------------+ +| | :meth:`str.islower` | :meth:`bytes.islower` | +| +-------------------------------------------+---------------------------------------------------+ +| | :meth:`str.isupper` | :meth:`bytes.isupper` | +| +-------------------------------------------+---------------------------------------------------+ +| | :meth:`str.istitle` | :meth:`bytes.istitle` | +| +-------------------------------------------+---------------------------------------------------+ +| | :meth:`str.isspace` | :meth:`bytes.isspace` | +| +-------------------------------------------+---------------------------------------------------+ +| | :meth:`str.isprintable` | | ++--------------------------+-------------------------------------------+---------------------------------------------------+ +| Case Manipulation | :meth:`str.lower` | :meth:`bytes.lower` | +| +-------------------------------------------+---------------------------------------------------+ +| | :meth:`str.upper` | :meth:`bytes.upper` | +| +-------------------------------------------+---------------------------------------------------+ +| | :meth:`str.casefold` | | +| +-------------------------------------------+---------------------------------------------------+ +| | :meth:`str.capitalize` | :meth:`bytes.capitalize` | +| +-------------------------------------------+---------------------------------------------------+ +| | :meth:`str.title` | :meth:`bytes.title` | +| +-------------------------------------------+---------------------------------------------------+ +| | :meth:`str.swapcase` | :meth:`bytes.swapcase` | ++--------------------------+-------------------+-----------------------+---------------------+-----------------------------+ +| Padding and Stripping | :meth:`str.ljust` | :meth:`str.rjust` | :meth:`bytes.ljust` | :meth:`bytes.rjust` | +| +-------------------+-----------------------+---------------------+-----------------------------+ +| | :meth:`str.center` | :meth:`bytes.center` | +| +-------------------------------------------+---------------------------------------------------+ +| | :meth:`str.expandtabs` | :meth:`bytes.expandtabs` | +| +-------------------------------------------+---------------------------------------------------+ +| | :meth:`str.strip` | :meth:`bytes.strip` | +| +--------------------+----------------------+----------------------+----------------------------+ +| | :meth:`str.lstrip` | :meth:`str.rstrip` | :meth:`bytes.lstrip` | :meth:`bytes.rstrip` | ++--------------------------+--------------------+----------------------+----------------------+----------------------------+ +| Translation and Encoding | :meth:`str.translate` | :meth:`bytes.translate` | +| +-------------------------------------------+---------------------------------------------------+ +| | :meth:`str.maketrans` | :meth:`bytes.maketrans` | +| +-------------------------------------------+---------------------------------------------------+ +| | :meth:`str.encode` | | +| +-------------------------------------------+---------------------------------------------------+ +| | | :meth:`bytes.decode` | ++--------------------------+-------------------------------------------+---------------------------------------------------+ + .. 
_textseq: Text Sequence Type --- :class:`str` diff --git a/Doc/library/string.rst b/Doc/library/string.rst index 913672a3ff2270..09165c481b246e 100644 --- a/Doc/library/string.rst +++ b/Doc/library/string.rst @@ -59,11 +59,18 @@ The constants defined in this module are: String of ASCII characters which are considered punctuation characters in the ``C`` locale: ``!"#$%&'()*+,-./:;<=>?@[\]^_`{|}~``. + .. data:: printable - String of ASCII characters which are considered printable. This is a - combination of :const:`digits`, :const:`ascii_letters`, :const:`punctuation`, - and :const:`whitespace`. + String of ASCII characters which are considered printable by Python. + This is a combination of :const:`digits`, :const:`ascii_letters`, + :const:`punctuation`, and :const:`whitespace`. + + .. note:: + + By design, :meth:`string.printable.isprintable() ` + returns :const:`False`. In particular, ``string.printable`` is not + printable in the POSIX sense (see :manpage:`LC_CTYPE `). .. data:: whitespace diff --git a/Doc/library/sys.rst b/Doc/library/sys.rst index dd6293c722e7ad..855237e0984972 100644 --- a/Doc/library/sys.rst +++ b/Doc/library/sys.rst @@ -8,7 +8,7 @@ This module provides access to some variables used or maintained by the interpreter and to functions that interact strongly with the interpreter. It is -always available. +always available. Unless explicitly noted otherwise, all variables are read-only. .. data:: abiflags @@ -855,6 +855,11 @@ always available. reflect the actual number of references. Consequently, do not rely on the returned value to be accurate, other than a value of 0 or 1. + .. impl-detail:: + + :term:`Immortal ` objects with a large reference count can be + identified via :func:`_is_immortal`. + .. versionchanged:: 3.12 Immortal objects have very large refcounts that do not match the actual number of references to the object. @@ -1264,6 +1269,24 @@ always available. .. versionadded:: 3.12 +.. function:: _is_immortal(op) + + Return :const:`True` if the given object is :term:`immortal`, :const:`False` + otherwise. + + .. note:: + + Objects that are immortal (and thus return ``True`` upon being passed + to this function) are not guaranteed to be immortal in future versions, + and vice versa for mortal objects. + + .. versionadded:: next + + .. impl-detail:: + + This function should be used for specialized purposes only. + It is not guaranteed to exist in all implementations of Python. + .. function:: _is_interned(string) Return :const:`True` if the given string is "interned", :const:`False` @@ -1422,6 +1445,7 @@ always available. AIX ``'aix'`` Android ``'android'`` Emscripten ``'emscripten'`` + FreeBSD ``'freebsd'`` iOS ``'ios'`` Linux ``'linux'`` macOS ``'darwin'`` @@ -1432,12 +1456,12 @@ always available. On Unix systems not listed in the table, the value is the lowercased OS name as returned by ``uname -s``, with the first part of the version as returned by - ``uname -r`` appended, e.g. ``'sunos5'`` or ``'freebsd8'``, *at the time - when Python was built*. Unless you want to test for a specific system - version, it is therefore recommended to use the following idiom:: + ``uname -r`` appended, e.g. ``'sunos5'``, *at the time when Python was built*. + Unless you want to test for a specific system version, it is therefore + recommended to use the following idiom:: - if sys.platform.startswith('freebsd'): - # FreeBSD-specific code here... + if sys.platform.startswith('sunos'): + # SunOS-specific code here... .. 
versionchanged:: 3.3 On Linux, :data:`sys.platform` doesn't contain the major version anymore. @@ -1451,6 +1475,10 @@ always available. On Android, :data:`sys.platform` now returns ``'android'`` rather than ``'linux'``. + .. versionchanged:: 3.14 + On FreeBSD, :data:`sys.platform` doesn't contain the major version anymore. + It is always ``'freebsd'``, instead of ``'freebsd13'`` or ``'freebsd14'``. + .. seealso:: :data:`os.name` has a coarser granularity. :func:`os.uname` gives diff --git a/Doc/library/sysconfig.rst b/Doc/library/sysconfig.rst index 3921908b7c7bfc..9f018f9c8f0e50 100644 --- a/Doc/library/sysconfig.rst +++ b/Doc/library/sysconfig.rst @@ -388,7 +388,8 @@ Other functions Windows will return one of: - - win-amd64 (64bit Windows on AMD64, aka x86_64, Intel64, and EM64T) + - win-amd64 (64-bit Windows on AMD64, aka x86_64, Intel64, and EM64T) + - win-arm64 (64-bit Windows on ARM64, aka AArch64) - win32 (all others - specifically, sys.platform is returned) macOS can return: diff --git a/Doc/library/threading.rst b/Doc/library/threading.rst index f183f3f535c4cb..00511df32e4388 100644 --- a/Doc/library/threading.rst +++ b/Doc/library/threading.rst @@ -380,6 +380,13 @@ since it is impossible to detect the termination of alien threads. This method will raise a :exc:`RuntimeError` if called more than once on the same thread object. + If supported, set the operating system thread name to + :attr:`threading.Thread.name`. The name can be truncated depending on the + operating system thread name limits. + + .. versionchanged:: 3.14 + Set the operating system thread name. + .. method:: run() Method representing the thread's activity. @@ -443,9 +450,6 @@ since it is impossible to detect the termination of alien threads. running thread is renamed. (Setting the *name* attribute of a different thread only updates the Python Thread object.) - .. versionchanged:: 3.14 - Set the operating system thread name. - .. method:: getName() setName() diff --git a/Doc/library/time.rst b/Doc/library/time.rst index 6265c2214eaa0d..804e2679027bd4 100644 --- a/Doc/library/time.rst +++ b/Doc/library/time.rst @@ -385,6 +385,8 @@ Functions The suspension time may be longer than requested by an arbitrary amount, because of the scheduling of other activity in the system. + .. rubric:: Windows implementation + On Windows, if *secs* is zero, the thread relinquishes the remainder of its time slice to any other thread that is ready to run. If there are no other threads ready to run, the function returns immediately, and the thread @@ -393,12 +395,19 @@ Functions `_ which provides resolution of 100 nanoseconds. If *secs* is zero, ``Sleep(0)`` is used. - Unix implementation: + .. rubric:: Unix implementation * Use ``clock_nanosleep()`` if available (resolution: 1 nanosecond); * Or use ``nanosleep()`` if available (resolution: 1 nanosecond); * Or use ``select()`` (resolution: 1 microsecond). + .. note:: + + To emulate a "no-op", use :keyword:`pass` instead of ``time.sleep(0)``. + + To voluntarily relinquish the CPU, specify a real-time :ref:`scheduling + policy ` and use :func:`os.sched_yield` instead. + .. audit-event:: time.sleep secs .. versionchanged:: 3.5 diff --git a/Doc/library/tokenize.rst b/Doc/library/tokenize.rst index f719319a302a23..b80917eae66f8b 100644 --- a/Doc/library/tokenize.rst +++ b/Doc/library/tokenize.rst @@ -91,11 +91,10 @@ write back the modified script. sequences with at least two elements, the token type and the token string. Any additional sequence elements are ignored. 
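To make the round-trip guarantee described in this hunk concrete, a minimal sketch using only the standard library; the regenerated source may be spaced differently, but it produces the same token types and strings::

   >>> import io, tokenize
   >>> src = "spam( 1 )\n"
   >>> toks = [(t.type, t.string)
   ...         for t in tokenize.generate_tokens(io.StringIO(src).readline)]
   >>> round_tripped = tokenize.untokenize(toks)
   >>> toks == [(t.type, t.string)
   ...          for t in tokenize.generate_tokens(io.StringIO(round_tripped).readline)]
   True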
- The reconstructed script is returned as a single string. The result is - guaranteed to tokenize back to match the input so that the conversion is - lossless and round-trips are assured. The guarantee applies only to the - token type and token string as the spacing between tokens (column - positions) may change. + The result is guaranteed to tokenize back to match the input so that the + conversion is lossless and round-trips are assured. The guarantee applies + only to the token type and token string as the spacing between tokens + (column positions) may change. It returns bytes, encoded using the :data:`~token.ENCODING` token, which is the first token sequence output by :func:`.tokenize`. If there is no diff --git a/Doc/library/turtle.rst b/Doc/library/turtle.rst index 512647f5f6e01f..98dfa0f53bc6a5 100644 --- a/Doc/library/turtle.rst +++ b/Doc/library/turtle.rst @@ -213,6 +213,31 @@ useful when working with learners for whom typing is not a skill. use turtle graphics with a learner. +Automatically begin and end filling +----------------------------------- + +Starting with Python 3.14, you can use the :func:`fill` :term:`context manager` +instead of :func:`begin_fill` and :func:`end_fill` to automatically begin and +end fill. Here is an example:: + + with fill(): + for i in range(4): + forward(100) + right(90) + + forward(200) + +The code above is equivalent to:: + + begin_fill() + for i in range(4): + forward(100) + right(90) + end_fill() + + forward(200) + + Use the ``turtle`` module namespace ----------------------------------- @@ -351,6 +376,7 @@ Pen control Filling | :func:`filling` + | :func:`fill` | :func:`begin_fill` | :func:`end_fill` @@ -381,6 +407,7 @@ Using events | :func:`ondrag` Special Turtle methods + | :func:`poly` | :func:`begin_poly` | :func:`end_poly` | :func:`get_poly` @@ -403,6 +430,7 @@ Window control | :func:`setworldcoordinates` Animation control + | :func:`no_animation` | :func:`delay` | :func:`tracer` | :func:`update` @@ -993,8 +1021,8 @@ Settings for measurement >>> turtle.heading() 90.0 - Change angle measurement unit to grad (also known as gon, - grade, or gradian and equals 1/100-th of the right angle.) + >>> # Change angle measurement unit to grad (also known as gon, + >>> # grade, or gradian and equals 1/100-th of the right angle.) >>> turtle.degrees(400.0) >>> turtle.heading() 100.0 @@ -1275,6 +1303,29 @@ Filling ... else: ... turtle.pensize(3) +.. function:: fill() + + Fill the shape drawn in the ``with turtle.fill():`` block. + + .. doctest:: + :skipif: _tkinter is None + + >>> turtle.color("black", "red") + >>> with turtle.fill(): + ... turtle.circle(80) + + Using :func:`!fill` is equivalent to adding the :func:`begin_fill` before the + fill-block and :func:`end_fill` after the fill-block: + + .. doctest:: + :skipif: _tkinter is None + + >>> turtle.color("black", "red") + >>> turtle.begin_fill() + >>> turtle.circle(80) + >>> turtle.end_fill() + + .. versionadded:: next .. function:: begin_fill() @@ -1648,6 +1699,23 @@ Using events Special Turtle methods ---------------------- + +.. function:: poly() + + Record the vertices of a polygon drawn in the ``with turtle.poly():`` block. + The first and last vertices will be connected. + + .. doctest:: + :skipif: _tkinter is None + + >>> with turtle.poly(): + ... turtle.forward(100) + ... turtle.right(60) + ... turtle.forward(100) + + .. versionadded:: next + + .. function:: begin_poly() Start recording the vertices of a polygon. 
Current turtle position is first
@@ -1926,6 +1994,23 @@ Window control
 Animation control
 -----------------
+.. function:: no_animation()
+
+   Temporarily disable turtle animation. The code written inside the
+   ``no_animation`` block will not be animated;
+   once the code block is exited, the drawing will appear.
+
+   .. doctest::
+      :skipif: _tkinter is None
+
+      >>> with screen.no_animation():
+      ...     for dist in range(2, 400, 2):
+      ...         fd(dist)
+      ...         rt(90)
+
+   .. versionadded:: next
+
+
 .. function:: delay(delay=None)
    :param delay: positive integer
@@ -2235,7 +2320,7 @@ Settings and special methods
   Add a turtle shape to TurtleScreen's shapelist. Only thusly registered
   shapes can be used by issuing the command ``shape(shapename)``.
-  .. versionchanged:: next
+  .. versionchanged:: 3.14
      Added support for PNG, PGM, and PPM image formats.
      Both a shape name and an image file name can be specified.
diff --git a/Doc/library/unittest.rst b/Doc/library/unittest.rst
index 7f8b710f611002..5bb7f88d585e3b 100644
--- a/Doc/library/unittest.rst
+++ b/Doc/library/unittest.rst
@@ -883,6 +883,12 @@ Test cases
   | :meth:`assertNotIsInstance(a, b) | ``not isinstance(a, b)`` | 3.2 |
   | ` | | |
   +-----------------------------------------+-----------------------------+---------------+
+  | :meth:`assertIsSubclass(a, b) | ``issubclass(a, b)`` | 3.14 |
+  | ` | | |
+  +-----------------------------------------+-----------------------------+---------------+
+  | :meth:`assertNotIsSubclass(a, b) | ``not issubclass(a, b)`` | 3.14 |
+  | ` | | |
+  +-----------------------------------------+-----------------------------+---------------+
   All the assert methods accept a *msg* argument that, if specified, is used
   as the error message on failure (see also :data:`longMessage`).
@@ -961,6 +967,15 @@ Test cases
   .. versionadded:: 3.2
+  .. method:: assertIsSubclass(cls, superclass, msg=None)
+              assertNotIsSubclass(cls, superclass, msg=None)
+
+     Test that *cls* is (or is not) a subclass of *superclass* (which can be a
+     class or a tuple of classes, as supported by :func:`issubclass`).
+     To check for the exact type, use :func:`assertIs(cls, superclass) `.
+
+     .. versionadded:: 3.14
+
   It is also possible to check the production of exceptions, warnings, and
   log messages using the following methods:
@@ -1210,6 +1225,24 @@ Test cases
   | ` | elements in the same number, | |
   | | regardless of their order. | |
   +---------------------------------------+--------------------------------+--------------+
+  | :meth:`assertStartsWith(a, b) | ``a.startswith(b)`` | 3.14 |
+  | ` | | |
+  +---------------------------------------+--------------------------------+--------------+
+  | :meth:`assertNotStartsWith(a, b) | ``not a.startswith(b)`` | 3.14 |
+  | ` | | |
+  +---------------------------------------+--------------------------------+--------------+
+  | :meth:`assertEndsWith(a, b) | ``a.endswith(b)`` | 3.14 |
+  | ` | | |
+  +---------------------------------------+--------------------------------+--------------+
+  | :meth:`assertNotEndsWith(a, b) | ``not a.endswith(b)`` | 3.14 |
+  | ` | | |
+  +---------------------------------------+--------------------------------+--------------+
+  | :meth:`assertHasAttr(a, b) | ``hasattr(a, b)`` | 3.14 |
+  | ` | | |
+  +---------------------------------------+--------------------------------+--------------+
+  | :meth:`assertNotHasAttr(a, b) | ``not hasattr(a, b)`` | 3.14 |
+  | ` | | |
+  +---------------------------------------+--------------------------------+--------------+
 ..
method:: assertAlmostEqual(first, second, places=7, msg=None, delta=None) @@ -1279,6 +1312,34 @@ Test cases .. versionadded:: 3.2 + .. method:: assertStartsWith(s, prefix, msg=None) + .. method:: assertNotStartsWith(s, prefix, msg=None) + + Test that the Unicode or byte string *s* starts (or does not start) + with a *prefix*. + *prefix* can also be a tuple of strings to try. + + .. versionadded:: 3.14 + + + .. method:: assertEndsWith(s, suffix, msg=None) + .. method:: assertNotEndsWith(s, suffix, msg=None) + + Test that the Unicode or byte string *s* ends (or does not end) + with a *suffix*. + *suffix* can also be a tuple of strings to try. + + .. versionadded:: 3.14 + + + .. method:: assertHasAttr(obj, name, msg=None) + .. method:: assertNotHasAttr(obj, name, msg=None) + + Test that the object *obj* has (or has not) an attribute *name*. + + .. versionadded:: 3.14 + + .. _type-specific-methods: The :meth:`assertEqual` method dispatches the equality check for objects of diff --git a/Doc/library/uuid.rst b/Doc/library/uuid.rst index 39e82d0e19a9ac..09b4d4ac4ffd56 100644 --- a/Doc/library/uuid.rst +++ b/Doc/library/uuid.rst @@ -289,6 +289,25 @@ of the :attr:`~UUID.variant` attribute: Reserved for future definition. +The :mod:`uuid` module defines the special Nil and Max UUID values: + + +.. data:: NIL + + A special form of UUID that is specified to have all 128 bits set to zero + according to :rfc:`RFC 9562, §5.9 <9562#section-5.9>`. + + .. versionadded:: next + + +.. data:: MAX + + A special form of UUID that is specified to have all 128 bits set to one + according to :rfc:`RFC 9562, §5.10 <9562#section-5.10>`. + + .. versionadded:: next + + .. seealso:: :rfc:`9562` - A Universally Unique IDentifier (UUID) URN Namespace @@ -380,6 +399,14 @@ Here are some examples of typical usage of the :mod:`uuid` module:: >>> uuid.UUID(bytes=x.bytes) UUID('00010203-0405-0607-0809-0a0b0c0d0e0f') + >>> # get the Nil UUID + >>> uuid.NIL + UUID('00000000-0000-0000-0000-000000000000') + + >>> # get the Max UUID + >>> uuid.MAX + UUID('ffffffff-ffff-ffff-ffff-ffffffffffff') + .. _uuid-cli-example: diff --git a/Doc/license.rst b/Doc/license.rst index 428dc22b817ebe..90783e3e31a69d 100644 --- a/Doc/license.rst +++ b/Doc/license.rst @@ -11,59 +11,63 @@ History of the software ======================= Python was created in the early 1990s by Guido van Rossum at Stichting -Mathematisch Centrum (CWI, see https://www.cwi.nl/) in the Netherlands as a +Mathematisch Centrum (CWI, see https://www.cwi.nl) in the Netherlands as a successor of a language called ABC. Guido remains Python's principal author, although it includes many contributions from others. In 1995, Guido continued his work on Python at the Corporation for National -Research Initiatives (CNRI, see https://www.cnri.reston.va.us/) in Reston, +Research Initiatives (CNRI, see https://www.cnri.reston.va.us) in Reston, Virginia where he released several versions of the software. In May 2000, Guido and the Python core development team moved to BeOpen.com to form the BeOpen PythonLabs team. In October of the same year, the PythonLabs -team moved to Digital Creations (now Zope Corporation; see -https://www.zope.org/). In 2001, the Python Software Foundation (PSF, see +team moved to Digital Creations, which became +Zope Corporation. In 2001, the Python Software Foundation (PSF, see https://www.python.org/psf/) was formed, a non-profit organization created -specifically to own Python-related Intellectual Property. 
Zope Corporation is a +specifically to own Python-related Intellectual Property. Zope Corporation was a sponsoring member of the PSF. -All Python releases are Open Source (see https://opensource.org/ for the Open +All Python releases are Open Source (see https://opensource.org for the Open Source Definition). Historically, most, but not all, Python releases have also been GPL-compatible; the table below summarizes the various releases. -+----------------+--------------+------------+------------+-----------------+ -| Release | Derived from | Year | Owner | GPL compatible? | -+================+==============+============+============+=================+ -| 0.9.0 thru 1.2 | n/a | 1991-1995 | CWI | yes | -+----------------+--------------+------------+------------+-----------------+ -| 1.3 thru 1.5.2 | 1.2 | 1995-1999 | CNRI | yes | -+----------------+--------------+------------+------------+-----------------+ -| 1.6 | 1.5.2 | 2000 | CNRI | no | -+----------------+--------------+------------+------------+-----------------+ -| 2.0 | 1.6 | 2000 | BeOpen.com | no | -+----------------+--------------+------------+------------+-----------------+ -| 1.6.1 | 1.6 | 2001 | CNRI | no | -+----------------+--------------+------------+------------+-----------------+ -| 2.1 | 2.0+1.6.1 | 2001 | PSF | no | -+----------------+--------------+------------+------------+-----------------+ -| 2.0.1 | 2.0+1.6.1 | 2001 | PSF | yes | -+----------------+--------------+------------+------------+-----------------+ -| 2.1.1 | 2.1+2.0.1 | 2001 | PSF | yes | -+----------------+--------------+------------+------------+-----------------+ -| 2.1.2 | 2.1.1 | 2002 | PSF | yes | -+----------------+--------------+------------+------------+-----------------+ -| 2.1.3 | 2.1.2 | 2002 | PSF | yes | -+----------------+--------------+------------+------------+-----------------+ -| 2.2 and above | 2.1.1 | 2001-now | PSF | yes | -+----------------+--------------+------------+------------+-----------------+ ++----------------+--------------+------------+------------+---------------------+ +| Release | Derived from | Year | Owner | GPL-compatible? 
(1) | ++================+==============+============+============+=====================+ +| 0.9.0 thru 1.2 | n/a | 1991-1995 | CWI | yes | ++----------------+--------------+------------+------------+---------------------+ +| 1.3 thru 1.5.2 | 1.2 | 1995-1999 | CNRI | yes | ++----------------+--------------+------------+------------+---------------------+ +| 1.6 | 1.5.2 | 2000 | CNRI | no | ++----------------+--------------+------------+------------+---------------------+ +| 2.0 | 1.6 | 2000 | BeOpen.com | no | ++----------------+--------------+------------+------------+---------------------+ +| 1.6.1 | 1.6 | 2001 | CNRI | yes (2) | ++----------------+--------------+------------+------------+---------------------+ +| 2.1 | 2.0+1.6.1 | 2001 | PSF | no | ++----------------+--------------+------------+------------+---------------------+ +| 2.0.1 | 2.0+1.6.1 | 2001 | PSF | yes | ++----------------+--------------+------------+------------+---------------------+ +| 2.1.1 | 2.1+2.0.1 | 2001 | PSF | yes | ++----------------+--------------+------------+------------+---------------------+ +| 2.1.2 | 2.1.1 | 2002 | PSF | yes | ++----------------+--------------+------------+------------+---------------------+ +| 2.1.3 | 2.1.2 | 2002 | PSF | yes | ++----------------+--------------+------------+------------+---------------------+ +| 2.2 and above | 2.1.1 | 2001-now | PSF | yes | ++----------------+--------------+------------+------------+---------------------+ .. note:: - GPL-compatible doesn't mean that we're distributing Python under the GPL. All - Python licenses, unlike the GPL, let you distribute a modified version without - making your changes open source. The GPL-compatible licenses make it possible to - combine Python with other software that is released under the GPL; the others - don't. + (1) GPL-compatible doesn't mean that we're distributing Python under the GPL. + All Python licenses, unlike the GPL, let you distribute a modified version + without making your changes open source. The GPL-compatible licenses make + it possible to combine Python with other software that is released under + the GPL; the others don't. + + (2) According to Richard Stallman, 1.6.1 is not GPL-compatible, because its license + has a choice of law clause. According to CNRI, however, Stallman's lawyer has + told CNRI's lawyer that 1.6.1 is "not incompatible" with the GPL. Thanks to the many outside volunteers who have worked under Guido's direction to make these releases possible. @@ -73,10 +77,10 @@ Terms and conditions for accessing or otherwise using Python ============================================================ Python software and documentation are licensed under the -:ref:`PSF License Agreement `. +Python Software Foundation License Version 2. Starting with Python 3.8.6, examples, recipes, and other code in -the documentation are dual licensed under the PSF License Agreement +the documentation are dual licensed under the PSF License Version 2 and the :ref:`Zero-Clause BSD license `. Some software incorporated into Python is under different licenses. @@ -86,39 +90,38 @@ See :ref:`OtherLicenses` for an incomplete list of these licenses. .. _PSF-license: -PSF LICENSE AGREEMENT FOR PYTHON |release| ------------------------------------------- +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- .. parsed-literal:: 1. 
This LICENSE AGREEMENT is between the Python Software Foundation ("PSF"), and - the Individual or Organization ("Licensee") accessing and otherwise using Python - |release| software in source or binary form and its associated documentation. + the Individual or Organization ("Licensee") accessing and otherwise using this + software ("Python") in source or binary form and its associated documentation. 2. Subject to the terms and conditions of this License Agreement, PSF hereby grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, analyze, test, perform and/or display publicly, prepare derivative works, - distribute, and otherwise use Python |release| alone or in any derivative + distribute, and otherwise use Python alone or in any derivative version, provided, however, that PSF's License Agreement and PSF's notice of copyright, i.e., "Copyright © 2001 Python Software Foundation; All Rights - Reserved" are retained in Python |release| alone or in any derivative version + Reserved" are retained in Python alone or in any derivative version prepared by Licensee. 3. In the event Licensee prepares a derivative work that is based on or - incorporates Python |release| or any part thereof, and wants to make the + incorporates Python or any part thereof, and wants to make the derivative work available to others as provided herein, then Licensee hereby - agrees to include in any such work a brief summary of the changes made to Python - |release|. + agrees to include in any such work a brief summary of the changes made to Python. - 4. PSF is making Python |release| available to Licensee on an "AS IS" basis. + 4. PSF is making Python available to Licensee on an "AS IS" basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE - USE OF PYTHON |release| WILL NOT INFRINGE ANY THIRD PARTY RIGHTS. + USE OF PYTHON WILL NOT INFRINGE ANY THIRD PARTY RIGHTS. - 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON |release| + 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS A RESULT OF - MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON |release|, OR ANY DERIVATIVE + MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. 6. This License Agreement will automatically terminate upon a material breach of @@ -130,7 +133,7 @@ PSF LICENSE AGREEMENT FOR PYTHON |release| trademark sense to endorse or promote products or services of Licensee, or any third party. - 8. By copying, installing or otherwise using Python |release|, Licensee agrees + 8. By copying, installing or otherwise using Python, Licensee agrees to be bound by the terms and conditions of this License Agreement. @@ -205,7 +208,7 @@ CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1 Agreement. This Agreement together with Python 1.6.1 may be located on the internet using the following unique, persistent identifier (known as a handle): 1895.22/1013. This Agreement may also be obtained from a proxy server on the - internet using the following URL: http://hdl.handle.net/1895.22/1013." + internet using the following URL: http://hdl.handle.net/1895.22/1013". 3. 
In the event Licensee prepares a derivative work that is based on or incorporates Python 1.6.1 or any part thereof, and wants to make the derivative @@ -273,8 +276,8 @@ CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2 .. _BSD0: -ZERO-CLAUSE BSD LICENSE FOR CODE IN THE PYTHON |release| DOCUMENTATION ----------------------------------------------------------------------- +ZERO-CLAUSE BSD LICENSE FOR CODE IN THE PYTHON DOCUMENTATION +------------------------------------------------------------ .. parsed-literal:: @@ -371,7 +374,7 @@ Project, https://www.wide.ad.jp/. :: may be used to endorse or promote products derived from this software without specific prior written permission. - THIS SOFTWARE IS PROVIDED BY THE PROJECT AND CONTRIBUTORS ``AS IS'' AND + THIS SOFTWARE IS PROVIDED BY THE PROJECT AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE PROJECT OR CONTRIBUTORS BE LIABLE @@ -580,7 +583,7 @@ interface:: notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND + THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE @@ -881,7 +884,7 @@ sources unless the build is configured ``--with-system-libffi``:: Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the - ``Software''), to deal in the Software without restriction, including + "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to @@ -890,7 +893,7 @@ sources unless the build is configured ``--with-system-libffi``:: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - THE SOFTWARE IS PROVIDED ``AS IS'', WITHOUT WARRANTY OF ANY KIND, + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT @@ -1119,7 +1122,7 @@ The file is distributed under the 2-Clause BSD License:: notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR + THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, diff --git a/Doc/make.bat b/Doc/make.bat index 87d8359ef112bb..99f0d5c44f0098 100644 --- a/Doc/make.bat +++ b/Doc/make.bat @@ -127,16 +127,14 @@ goto end :build if not exist "%BUILDDIR%" mkdir "%BUILDDIR%" -rem PY_MISC_NEWS_DIR is also used by our Sphinx extension in tools/extensions/pyspecific.py -if not defined PY_MISC_NEWS_DIR set PY_MISC_NEWS_DIR=%BUILDDIR%\%1 -if not exist "%PY_MISC_NEWS_DIR%" mkdir "%PY_MISC_NEWS_DIR%" +if not exist build mkdir build if exist ..\Misc\NEWS ( - echo.Copying Misc\NEWS to %PY_MISC_NEWS_DIR%\NEWS - copy ..\Misc\NEWS "%PY_MISC_NEWS_DIR%\NEWS" > nul + echo.Copying existing Misc\NEWS file to Doc\build\NEWS + copy ..\Misc\NEWS build\NEWS > nul ) else if exist ..\Misc\NEWS.D ( if defined BLURB ( echo.Merging Misc/NEWS with %BLURB% - %BLURB% merge -f "%PY_MISC_NEWS_DIR%\NEWS" + %BLURB% merge -f build\NEWS ) else ( echo.No Misc/NEWS file and Blurb is not available. exit /B 1 @@ -144,12 +142,12 @@ if exist ..\Misc\NEWS ( ) if defined PAPER ( - set SPHINXOPTS=-D latex_elements.papersize=%PAPER% %SPHINXOPTS% + set SPHINXOPTS=--define latex_elements.papersize=%PAPER% %SPHINXOPTS% ) if "%1" EQU "htmlhelp" ( - set SPHINXOPTS=-D html_theme_options.body_max_width=none %SPHINXOPTS% + set SPHINXOPTS=--define html_theme_options.body_max_width=none %SPHINXOPTS% ) -cmd /S /C "%SPHINXBUILD% %SPHINXOPTS% -b%1 -dbuild\doctrees . "%BUILDDIR%\%1" %2 %3 %4 %5 %6 %7 %8 %9" +cmd /S /C "%SPHINXBUILD% %SPHINXOPTS% --builder %1 --doctree-dir build\doctrees . "%BUILDDIR%\%1" %2 %3 %4 %5 %6 %7 %8 %9" if "%1" EQU "htmlhelp" ( "%HTMLHELP%" "%BUILDDIR%\htmlhelp\python%DISTVERSION:.=%.hhp" diff --git a/Doc/reference/compound_stmts.rst b/Doc/reference/compound_stmts.rst index 1b1e9f479cbe08..71cc0c83de567e 100644 --- a/Doc/reference/compound_stmts.rst +++ b/Doc/reference/compound_stmts.rst @@ -1217,8 +1217,10 @@ A function definition defines a user-defined function object (see section : | `parameter_list_no_posonly` parameter_list_no_posonly: `defparameter` ("," `defparameter`)* ["," [`parameter_list_starargs`]] : | `parameter_list_starargs` - parameter_list_starargs: "*" [`star_parameter`] ("," `defparameter`)* ["," ["**" `parameter` [","]]] - : | "**" `parameter` [","] + parameter_list_starargs: "*" [`star_parameter`] ("," `defparameter`)* ["," [`parameter_star_kwargs`]] + : "*" ("," `defparameter`)+ ["," [`parameter_star_kwargs`]] + : | `parameter_star_kwargs` + parameter_star_kwargs: "**" `parameter` [","] parameter: `identifier` [":" `expression`] star_parameter: `identifier` [":" ["*"] `expression`] defparameter: `parameter` ["=" `expression`] diff --git a/Doc/reference/import.rst b/Doc/reference/import.rst index ac363e8cfa00dc..48fdd0f5d021c7 100644 --- a/Doc/reference/import.rst +++ b/Doc/reference/import.rst @@ -762,10 +762,10 @@ module. The current working directory -- denoted by an empty string -- is handled slightly differently from other entries on :data:`sys.path`. First, if the -current working directory is found to not exist, no value is stored in -:data:`sys.path_importer_cache`. Second, the value for the current working -directory is looked up fresh for each module lookup. Third, the path used for -:data:`sys.path_importer_cache` and returned by +current working directory cannot be determined or is found not to exist, no +value is stored in :data:`sys.path_importer_cache`. Second, the value for the +current working directory is looked up fresh for each module lookup. 
Third, +the path used for :data:`sys.path_importer_cache` and returned by :meth:`importlib.machinery.PathFinder.find_spec` will be the actual current working directory and not the empty string. diff --git a/Doc/requirements-oldest-sphinx.txt b/Doc/requirements-oldest-sphinx.txt deleted file mode 100644 index c8027a05706c21..00000000000000 --- a/Doc/requirements-oldest-sphinx.txt +++ /dev/null @@ -1,35 +0,0 @@ -# Requirements to build the Python documentation, for the oldest supported -# Sphinx version. -# -# We pin Sphinx and all of its dependencies to ensure a consistent environment. - -blurb -python-docs-theme>=2022.1 - -# Generated from: -# pip install "Sphinx~=7.2.6" -# pip freeze -# -# Sphinx 7.2.6 comes from ``needs_sphinx = '7.2.6'`` in ``Doc/conf.py``. - -alabaster==0.7.16 -babel==2.16.0 -certifi==2024.12.14 -charset-normalizer==3.4.0 -docutils==0.20.1 -idna==3.10 -imagesize==1.4.1 -Jinja2==3.1.5 -MarkupSafe==3.0.2 -packaging==24.2 -Pygments==2.18.0 -requests==2.32.3 -snowballstemmer==2.2.0 -Sphinx==7.2.6 -sphinxcontrib-applehelp==2.0.0 -sphinxcontrib-devhelp==2.0.0 -sphinxcontrib-htmlhelp==2.1.0 -sphinxcontrib-jsmath==1.0.1 -sphinxcontrib-qthelp==2.0.0 -sphinxcontrib-serializinghtml==2.0.0 -urllib3==2.3.0 diff --git a/Doc/requirements.txt b/Doc/requirements.txt index 5105786ccf283c..32ff8f74d05bb6 100644 --- a/Doc/requirements.txt +++ b/Doc/requirements.txt @@ -3,9 +3,10 @@ # Note that when updating this file, you will likely also have to update # the Doc/constraints.txt file. -# Sphinx version is pinned so that new versions that introduce new warnings +# The Sphinx version is pinned so that new versions that introduce new warnings # won't suddenly cause build failures. Updating the version is fine as long # as no warnings are raised by doing so. +# Keep this version in sync with ``Doc/conf.py``. 
sphinx~=8.1.0 blurb diff --git a/Doc/tools/.nitignore b/Doc/tools/.nitignore index 6940c95ab2c9a1..ad24fe82f754fc 100644 --- a/Doc/tools/.nitignore +++ b/Doc/tools/.nitignore @@ -12,7 +12,6 @@ Doc/c-api/stable.rst Doc/c-api/type.rst Doc/c-api/typeobj.rst Doc/extending/extending.rst -Doc/glossary.rst Doc/library/ast.rst Doc/library/asyncio-extending.rst Doc/library/asyncio-subprocess.rst diff --git a/Doc/tools/extensions/availability.py b/Doc/tools/extensions/availability.py index 47833fdcb87590..1a2c7b02b44439 100644 --- a/Doc/tools/extensions/availability.py +++ b/Doc/tools/extensions/availability.py @@ -6,6 +6,7 @@ from docutils import nodes from sphinx import addnodes +from sphinx.locale import _ as sphinx_gettext from sphinx.util import logging from sphinx.util.docutils import SphinxDirective @@ -55,7 +56,7 @@ class Availability(SphinxDirective): final_argument_whitespace = True def run(self) -> list[nodes.container]: - title = "Availability" + title = sphinx_gettext("Availability") refnode = addnodes.pending_xref( title, nodes.inline(title, title, classes=["xref", "std", "std-ref"]), diff --git a/Doc/tools/extensions/c_annotations.py b/Doc/tools/extensions/c_annotations.py index 50065d34a2c27a..089614a1f6c421 100644 --- a/Doc/tools/extensions/c_annotations.py +++ b/Doc/tools/extensions/c_annotations.py @@ -16,7 +16,6 @@ from pathlib import Path from typing import TYPE_CHECKING -import sphinx from docutils import nodes from docutils.statemachine import StringList from sphinx import addnodes @@ -285,16 +284,6 @@ def setup(app: Sphinx) -> ExtensionMetadata: app.connect("builder-inited", init_annotations) app.connect("doctree-read", add_annotations) - if sphinx.version_info[:2] < (7, 2): - from docutils.parsers.rst import directives - from sphinx.domains.c import CObject - - # monkey-patch C object... - CObject.option_spec |= { - "no-index-entry": directives.flag, - "no-contents-entry": directives.flag, - } - return { "version": "1.0", "parallel_read_safe": True, diff --git a/Doc/tools/extensions/changes.py b/Doc/tools/extensions/changes.py new file mode 100644 index 00000000000000..8de5e7f78c6627 --- /dev/null +++ b/Doc/tools/extensions/changes.py @@ -0,0 +1,90 @@ +"""Support for documenting version of changes, additions, deprecations.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING + +from sphinx.domains.changeset import ( + VersionChange, + versionlabel_classes, + versionlabels, +) +from sphinx.locale import _ as sphinx_gettext + +if TYPE_CHECKING: + from docutils.nodes import Node + from sphinx.application import Sphinx + from sphinx.util.typing import ExtensionMetadata + + +def expand_version_arg(argument: str, release: str) -> str: + """Expand "next" to the current version""" + if argument == "next": + return sphinx_gettext("{} (unreleased)").format(release) + return argument + + +class PyVersionChange(VersionChange): + def run(self) -> list[Node]: + # Replace the 'next' special token with the current development version + self.arguments[0] = expand_version_arg( + self.arguments[0], self.config.release + ) + return super().run() + + +class DeprecatedRemoved(VersionChange): + required_arguments = 2 + + _deprecated_label = sphinx_gettext( + "Deprecated since version %s, will be removed in version %s" + ) + _removed_label = sphinx_gettext( + "Deprecated since version %s, removed in version %s" + ) + + def run(self) -> list[Node]: + # Replace the first two arguments (deprecated version and removed version) + # with a single tuple of both versions. 
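+        # (A tuple is used here because the base VersionChange directive takes a
+        # single version argument, while the two-part labels defined above contain
+        # two ``%s`` placeholders and are formatted with both versions at once.)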
+ version_deprecated = expand_version_arg( + self.arguments[0], self.config.release + ) + version_removed = self.arguments.pop(1) + if version_removed == "next": + raise ValueError( + "deprecated-removed:: second argument cannot be `next`" + ) + self.arguments[0] = version_deprecated, version_removed + + # Set the label based on if we have reached the removal version + current_version = tuple(map(int, self.config.version.split("."))) + removed_version = tuple(map(int, version_removed.split("."))) + if current_version < removed_version: + versionlabels[self.name] = self._deprecated_label + versionlabel_classes[self.name] = "deprecated" + else: + versionlabels[self.name] = self._removed_label + versionlabel_classes[self.name] = "removed" + try: + return super().run() + finally: + # reset versionlabels and versionlabel_classes + versionlabels[self.name] = "" + versionlabel_classes[self.name] = "" + + +def setup(app: Sphinx) -> ExtensionMetadata: + # Override Sphinx's directives with support for 'next' + app.add_directive("versionadded", PyVersionChange, override=True) + app.add_directive("versionchanged", PyVersionChange, override=True) + app.add_directive("versionremoved", PyVersionChange, override=True) + app.add_directive("deprecated", PyVersionChange, override=True) + + # Register the ``.. deprecated-removed::`` directive + app.add_directive("deprecated-removed", DeprecatedRemoved) + + return { + "version": "1.0", + "parallel_read_safe": True, + "parallel_write_safe": True, + } diff --git a/Doc/tools/extensions/misc_news.py b/Doc/tools/extensions/misc_news.py new file mode 100644 index 00000000000000..a24c440595ee92 --- /dev/null +++ b/Doc/tools/extensions/misc_news.py @@ -0,0 +1,75 @@ +"""Support for including Misc/NEWS.""" + +from __future__ import annotations + +import re +from pathlib import Path +from typing import TYPE_CHECKING + +from docutils import nodes +from sphinx.locale import _ as sphinx_gettext +from sphinx.util.docutils import SphinxDirective + +if TYPE_CHECKING: + from typing import Final + + from docutils.nodes import Node + from sphinx.application import Sphinx + from sphinx.util.typing import ExtensionMetadata + + +BLURB_HEADER = """\ ++++++++++++ +Python News ++++++++++++ +""" + +bpo_issue_re: Final[re.Pattern[str]] = re.compile( + "(?:issue #|bpo-)([0-9]+)", re.ASCII +) +gh_issue_re: Final[re.Pattern[str]] = re.compile( + "gh-(?:issue-)?([0-9]+)", re.ASCII | re.IGNORECASE +) +whatsnew_re: Final[re.Pattern[str]] = re.compile( + r"^what's new in (.*?)\??$", re.ASCII | re.IGNORECASE | re.MULTILINE +) + + +class MiscNews(SphinxDirective): + has_content = False + required_arguments = 1 + optional_arguments = 0 + final_argument_whitespace = False + option_spec = {} + + def run(self) -> list[Node]: + # Get content of NEWS file + source, _ = self.get_source_info() + news_file = Path(source).resolve().parent / self.arguments[0] + self.env.note_dependency(news_file) + try: + news_text = news_file.read_text(encoding="utf-8") + except (OSError, UnicodeError): + text = sphinx_gettext("The NEWS file is not available.") + return [nodes.strong(text, text)] + + # remove first 3 lines as they are the main heading + news_text = news_text.removeprefix(BLURB_HEADER) + + news_text = bpo_issue_re.sub(r":issue:`\1`", news_text) + # Fallback handling for GitHub issues + news_text = gh_issue_re.sub(r":gh:`\1`", news_text) + news_text = whatsnew_re.sub(r"\1", news_text) + + self.state_machine.insert_input(news_text.splitlines(), str(news_file)) + return [] + + +def setup(app: Sphinx) -> 
ExtensionMetadata: + app.add_directive("miscnews", MiscNews) + + return { + "version": "1.0", + "parallel_read_safe": True, + "parallel_write_safe": True, + } diff --git a/Doc/tools/extensions/pydoc_topics.py b/Doc/tools/extensions/pydoc_topics.py new file mode 100644 index 00000000000000..6e43df2e4bf9de --- /dev/null +++ b/Doc/tools/extensions/pydoc_topics.py @@ -0,0 +1,187 @@ +"""Support for building "topic help" for pydoc.""" + +from __future__ import annotations + +from time import asctime +from typing import TYPE_CHECKING + +from sphinx.builders.text import TextBuilder +from sphinx.util import logging +from sphinx.util.display import status_iterator +from sphinx.util.docutils import new_document +from sphinx.writers.text import TextTranslator + +if TYPE_CHECKING: + from collections.abc import Sequence, Set + + from sphinx.application import Sphinx + from sphinx.util.typing import ExtensionMetadata + +logger = logging.getLogger(__name__) + +_PYDOC_TOPIC_LABELS: Sequence[str] = sorted({ + "assert", + "assignment", + "assignment-expressions", + "async", + "atom-identifiers", + "atom-literals", + "attribute-access", + "attribute-references", + "augassign", + "await", + "binary", + "bitwise", + "bltin-code-objects", + "bltin-ellipsis-object", + "bltin-null-object", + "bltin-type-objects", + "booleans", + "break", + "callable-types", + "calls", + "class", + "comparisons", + "compound", + "context-managers", + "continue", + "conversions", + "customization", + "debugger", + "del", + "dict", + "dynamic-features", + "else", + "exceptions", + "execmodel", + "exprlists", + "floating", + "for", + "formatstrings", + "function", + "global", + "id-classes", + "identifiers", + "if", + "imaginary", + "import", + "in", + "integers", + "lambda", + "lists", + "naming", + "nonlocal", + "numbers", + "numeric-types", + "objects", + "operator-summary", + "pass", + "power", + "raise", + "return", + "sequence-types", + "shifting", + "slicings", + "specialattrs", + "specialnames", + "string-methods", + "strings", + "subscriptions", + "truth", + "try", + "types", + "typesfunctions", + "typesmapping", + "typesmethods", + "typesmodules", + "typesseq", + "typesseq-mutable", + "unary", + "while", + "with", + "yield", +}) + + +class PydocTopicsBuilder(TextBuilder): + name = "pydoc-topics" + + def init(self) -> None: + super().init() + self.topics: dict[str, str] = {} + + def get_outdated_docs(self) -> str: + # Return a string describing what an update build will build. + return "all pydoc topics" + + def write_documents(self, _docnames: Set[str]) -> None: + env = self.env + + labels: dict[str, tuple[str, str, str]] + labels = env.domains.standard_domain.labels + + # docname -> list of (topic_label, label_id) pairs + doc_labels: dict[str, list[tuple[str, str]]] = {} + for topic_label in _PYDOC_TOPIC_LABELS: + try: + docname, label_id, _section_name = labels[topic_label] + except KeyError: + logger.warning("label %r not in documentation", topic_label) + continue + doc_labels.setdefault(docname, []).append((topic_label, label_id)) + + for docname, label_ids in status_iterator( + doc_labels.items(), + "building topics... ", + length=len(doc_labels), + stringify_func=_display_labels, + ): + doctree = env.get_and_resolve_doctree(docname, builder=self) + doc_ids = doctree.ids + for topic_label, label_id in label_ids: + document = new_document("
") + document.append(doc_ids[label_id]) + visitor = TextTranslator(document, builder=self) + document.walkabout(visitor) + self.topics[topic_label] = visitor.body + + def finish(self) -> None: + topics_repr = "\n".join( + f" '{topic}': {_repr(self.topics[topic])}," + for topic in sorted(self.topics) + ) + topics = f"""\ +# Autogenerated by Sphinx on {asctime()} +# as part of the release process. + +topics = {{ +{topics_repr} +}} +""" + self.outdir.joinpath("topics.py").write_text(topics, encoding="utf-8") + + +def _display_labels(item: tuple[str, Sequence[tuple[str, str]]]) -> str: + _docname, label_ids = item + labels = [name for name, _id in label_ids] + if len(labels) > 4: + return f"{labels[0]}, {labels[1]}, ..., {labels[-2]}, {labels[-1]}" + return ", ".join(labels) + + +def _repr(text: str, /) -> str: + """Return a triple-single-quoted representation of text.""" + if "'''" not in text: + return f"r'''{text}'''" + text = text.replace("\\", "\\\\").replace("'''", r"\'\'\'") + return f"'''{text}'''" + + +def setup(app: Sphinx) -> ExtensionMetadata: + app.add_builder(PydocTopicsBuilder) + + return { + "version": "1.0", + "parallel_read_safe": True, + "parallel_write_safe": True, + } diff --git a/Doc/tools/extensions/pyspecific.py b/Doc/tools/extensions/pyspecific.py index bcb8a421e32d09..57cf80a7e77324 100644 --- a/Doc/tools/extensions/pyspecific.py +++ b/Doc/tools/extensions/pyspecific.py @@ -12,21 +12,14 @@ import re import io from os import getenv, path -from time import asctime -from pprint import pformat from docutils import nodes -from docutils.io import StringOutput from docutils.parsers.rst import directives -from docutils.utils import new_document, unescape +from docutils.utils import unescape from sphinx import addnodes -from sphinx.builders import Builder -from sphinx.domains.changeset import VersionChange, versionlabels, versionlabel_classes from sphinx.domains.python import PyFunction, PyMethod, PyModule from sphinx.locale import _ as sphinx_gettext from sphinx.util.docutils import SphinxDirective -from sphinx.writers.text import TextWriter, TextTranslator -from sphinx.util.display import status_iterator ISSUE_URI = 'https://bugs.python.org/issue?@action=redirect&bpo=%s' @@ -41,16 +34,6 @@ Body.enum.converters['lowerroman'] = \ Body.enum.converters['upperroman'] = lambda x: None -# monkey-patch the productionlist directive to allow hyphens in group names -# https://github.com/sphinx-doc/sphinx/issues/11854 -from sphinx.domains import std - -std.token_re = re.compile(r'`((~?[\w-]*:)?\w+)`') - -# backport :no-index: -PyModule.option_spec['no-index'] = directives.flag - - # Support for marking up and linking to bugs.python.org issues def issue_role(typ, rawtext, text, lineno, inliner, options={}, content=[]): @@ -107,32 +90,6 @@ def run(self): return [pnode] -# Support for documenting decorators - -class PyDecoratorMixin(object): - def handle_signature(self, sig, signode): - ret = super(PyDecoratorMixin, self).handle_signature(sig, signode) - signode.insert(0, addnodes.desc_addname('@', '@')) - return ret - - def needs_arglist(self): - return False - - -class PyDecoratorFunction(PyDecoratorMixin, PyFunction): - def run(self): - # a decorator function is a function after all - self.name = 'py:function' - return PyFunction.run(self) - - -# TODO: Use sphinx.domains.python.PyDecoratorMethod when possible -class PyDecoratorMethod(PyDecoratorMixin, PyMethod): - def run(self): - self.name = 'py:method' - return PyMethod.run(self) - - class PyCoroutineMixin(object): def 
handle_signature(self, sig, signode): ret = super(PyCoroutineMixin, self).handle_signature(sig, signode) @@ -184,160 +141,6 @@ def run(self): return PyMethod.run(self) -# Support for documenting version of changes, additions, deprecations - -def expand_version_arg(argument, release): - """Expand "next" to the current version""" - if argument == 'next': - return sphinx_gettext('{} (unreleased)').format(release) - return argument - - -class PyVersionChange(VersionChange): - def run(self): - # Replace the 'next' special token with the current development version - self.arguments[0] = expand_version_arg(self.arguments[0], - self.config.release) - return super().run() - - -class DeprecatedRemoved(VersionChange): - required_arguments = 2 - - _deprecated_label = sphinx_gettext('Deprecated since version %s, will be removed in version %s') - _removed_label = sphinx_gettext('Deprecated since version %s, removed in version %s') - - def run(self): - # Replace the first two arguments (deprecated version and removed version) - # with a single tuple of both versions. - version_deprecated = expand_version_arg(self.arguments[0], - self.config.release) - version_removed = self.arguments.pop(1) - if version_removed == 'next': - raise ValueError( - 'deprecated-removed:: second argument cannot be `next`') - self.arguments[0] = version_deprecated, version_removed - - # Set the label based on if we have reached the removal version - current_version = tuple(map(int, self.config.version.split('.'))) - removed_version = tuple(map(int, version_removed.split('.'))) - if current_version < removed_version: - versionlabels[self.name] = self._deprecated_label - versionlabel_classes[self.name] = 'deprecated' - else: - versionlabels[self.name] = self._removed_label - versionlabel_classes[self.name] = 'removed' - try: - return super().run() - finally: - # reset versionlabels and versionlabel_classes - versionlabels[self.name] = '' - versionlabel_classes[self.name] = '' - - -# Support for including Misc/NEWS - -issue_re = re.compile('(?:[Ii]ssue #|bpo-)([0-9]+)', re.I) -gh_issue_re = re.compile('(?:gh-issue-|gh-)([0-9]+)', re.I) -whatsnew_re = re.compile(r"(?im)^what's new in (.*?)\??$") - - -class MiscNews(SphinxDirective): - has_content = False - required_arguments = 1 - optional_arguments = 0 - final_argument_whitespace = False - option_spec = {} - - def run(self): - fname = self.arguments[0] - source = self.state_machine.input_lines.source( - self.lineno - self.state_machine.input_offset - 1) - source_dir = getenv('PY_MISC_NEWS_DIR') - if not source_dir: - source_dir = path.dirname(path.abspath(source)) - fpath = path.join(source_dir, fname) - self.env.note_dependency(path.abspath(fpath)) - try: - with io.open(fpath, encoding='utf-8') as fp: - content = fp.read() - except Exception: - text = 'The NEWS file is not available.' - node = nodes.strong(text, text) - return [node] - content = issue_re.sub(r':issue:`\1`', content) - # Fallback handling for the GitHub issue - content = gh_issue_re.sub(r':gh:`\1`', content) - content = whatsnew_re.sub(r'\1', content) - # remove first 3 lines as they are the main heading - lines = ['.. 
default-role:: obj', ''] + content.splitlines()[3:] - self.state_machine.insert_input(lines, fname) - return [] - - -# Support for building "topic help" for pydoc - -pydoc_topic_labels = [ - 'assert', 'assignment', 'assignment-expressions', 'async', 'atom-identifiers', - 'atom-literals', 'attribute-access', 'attribute-references', 'augassign', 'await', - 'binary', 'bitwise', 'bltin-code-objects', 'bltin-ellipsis-object', - 'bltin-null-object', 'bltin-type-objects', 'booleans', - 'break', 'callable-types', 'calls', 'class', 'comparisons', 'compound', - 'context-managers', 'continue', 'conversions', 'customization', 'debugger', - 'del', 'dict', 'dynamic-features', 'else', 'exceptions', 'execmodel', - 'exprlists', 'floating', 'for', 'formatstrings', 'function', 'global', - 'id-classes', 'identifiers', 'if', 'imaginary', 'import', 'in', 'integers', - 'lambda', 'lists', 'naming', 'nonlocal', 'numbers', 'numeric-types', - 'objects', 'operator-summary', 'pass', 'power', 'raise', 'return', - 'sequence-types', 'shifting', 'slicings', 'specialattrs', 'specialnames', - 'string-methods', 'strings', 'subscriptions', 'truth', 'try', 'types', - 'typesfunctions', 'typesmapping', 'typesmethods', 'typesmodules', - 'typesseq', 'typesseq-mutable', 'unary', 'while', 'with', 'yield' -] - - -class PydocTopicsBuilder(Builder): - name = 'pydoc-topics' - - default_translator_class = TextTranslator - - def init(self): - self.topics = {} - self.secnumbers = {} - - def get_outdated_docs(self): - return 'all pydoc topics' - - def get_target_uri(self, docname, typ=None): - return '' # no URIs - - def write(self, *ignored): - writer = TextWriter(self) - for label in status_iterator(pydoc_topic_labels, - 'building topics... ', - length=len(pydoc_topic_labels)): - if label not in self.env.domaindata['std']['labels']: - self.env.logger.warning(f'label {label!r} not in documentation') - continue - docname, labelid, sectname = self.env.domaindata['std']['labels'][label] - doctree = self.env.get_and_resolve_doctree(docname, self) - document = new_document('
') - document.append(doctree.ids[labelid]) - destination = StringOutput(encoding='utf-8') - writer.write(document, destination) - self.topics[label] = writer.output - - def finish(self): - f = open(path.join(self.outdir, 'topics.py'), 'wb') - try: - f.write('# -*- coding: utf-8 -*-\n'.encode('utf-8')) - f.write(('# Autogenerated by Sphinx on %s\n' % asctime()).encode('utf-8')) - f.write('# as part of the release process.\n'.encode('utf-8')) - f.write(('topics = ' + pformat(self.topics) + '\n').encode('utf-8')) - finally: - f.close() - - # Support for documenting Opcodes opcode_sig_re = re.compile(r'(\w+(?:\+\d)?)(?:\s*\((.*)\))?') @@ -417,22 +220,13 @@ def setup(app): app.add_role('issue', issue_role) app.add_role('gh', gh_issue_role) app.add_directive('impl-detail', ImplementationDetail) - app.add_directive('versionadded', PyVersionChange, override=True) - app.add_directive('versionchanged', PyVersionChange, override=True) - app.add_directive('versionremoved', PyVersionChange, override=True) - app.add_directive('deprecated', PyVersionChange, override=True) - app.add_directive('deprecated-removed', DeprecatedRemoved) - app.add_builder(PydocTopicsBuilder) app.add_object_type('opcode', 'opcode', '%s (opcode)', parse_opcode_signature) app.add_object_type('pdbcommand', 'pdbcmd', '%s (pdb command)', parse_pdb_command) app.add_object_type('monitoring-event', 'monitoring-event', '%s (monitoring event)', parse_monitoring_event) - app.add_directive_to_domain('py', 'decorator', PyDecoratorFunction) - app.add_directive_to_domain('py', 'decoratormethod', PyDecoratorMethod) app.add_directive_to_domain('py', 'coroutinefunction', PyCoroutineFunction) app.add_directive_to_domain('py', 'coroutinemethod', PyCoroutineMethod) app.add_directive_to_domain('py', 'awaitablefunction', PyAwaitableFunction) app.add_directive_to_domain('py', 'awaitablemethod', PyAwaitableMethod) app.add_directive_to_domain('py', 'abstractmethod', PyAbstractMethod) - app.add_directive('miscnews', MiscNews) app.connect('env-check-consistency', patch_pairindextypes) return {'version': '1.0', 'parallel_read_safe': True} diff --git a/Doc/tools/templates/dummy.html b/Doc/tools/templates/dummy.html index 49c2a71a5e40cf..4f0f6f91436a87 100644 --- a/Doc/tools/templates/dummy.html +++ b/Doc/tools/templates/dummy.html @@ -7,6 +7,10 @@ {% trans %}Deprecated since version {deprecated}, will be removed in version {removed}{% endtrans %} {% trans %}Deprecated since version {deprecated}, removed in version {removed}{% endtrans %} +In extensions/availability.py: + +{% trans %}Availability{% endtrans %} + In extensions/c_annotations.py: {% trans %}Part of the{% endtrans %} diff --git a/Doc/using/cmdline.rst b/Doc/using/cmdline.rst index 7db2f4820f346a..2a59cf3f62d4c5 100644 --- a/Doc/using/cmdline.rst +++ b/Doc/using/cmdline.rst @@ -1195,7 +1195,7 @@ conflict. .. envvar:: PYTHON_BASIC_REPL - If this variable is set to ``1``, the interpreter will not attempt to + If this variable is set to any value, the interpreter will not attempt to load the Python-based :term:`REPL` that requires :mod:`curses` and :mod:`readline`, and will instead use the traditional parser-based :term:`REPL`. diff --git a/Doc/using/configure.rst b/Doc/using/configure.rst index 82df8dfc948ed2..629859e36cb654 100644 --- a/Doc/using/configure.rst +++ b/Doc/using/configure.rst @@ -58,7 +58,7 @@ Features and minimum versions required to build CPython: .. versionchanged:: 3.13 Autoconf 2.71, aclocal 1.16.5 and SQLite 3.15.2 are now required. -.. versionchanged:: next +.. 
versionchanged:: 3.14 Autoconf 2.72 is now required. See also :pep:`7` "Style Guide for C Code" and :pep:`11` "CPython platform @@ -311,6 +311,10 @@ General Options By convention, ``--enable-experimental-jit`` is a shorthand for ``--enable-experimental-jit=yes``. + .. note:: + + When building CPython with JIT enabled, ensure that your system has Python 3.11 or later installed. + .. versionadded:: 3.13 .. option:: PKG_CONFIG diff --git a/Doc/whatsnew/3.13.rst b/Doc/whatsnew/3.13.rst index 6a0e483bd895d6..f2a8c711f4df88 100644 --- a/Doc/whatsnew/3.13.rst +++ b/Doc/whatsnew/3.13.rst @@ -1499,8 +1499,20 @@ All of the following modules were deprecated in Python 3.11, and are now removed: * :mod:`!aifc` + + * :pypi:`standard-aifc`: + Use the redistribution of ``aifc`` library from PyPI. + * :mod:`!audioop` + + * :pypi:`audioop-lts`: + Use ``audioop-lts`` library from PyPI. + * :mod:`!chunk` + + * :pypi:`standard-chunk`: + Use the redistribution of ``chunk`` library from PyPI. + * :mod:`!cgi` and :mod:`!cgitb` * :class:`!cgi.FieldStorage` can typically be replaced with @@ -1531,6 +1543,9 @@ and are now removed: For example, the :class:`email.message.EmailMessage` and :class:`email.message.Message` classes. + * :pypi:`standard-cgi`: and :pypi:`standard-cgitb`: + Use the redistribution of ``cgi`` and ``cgitb`` library from PyPI. + * :mod:`!crypt` and the private :mod:`!_crypt` extension. The :mod:`hashlib` module may be an appropriate replacement when simply hashing a value is required. @@ -1549,6 +1564,8 @@ and are now removed: Fork of the :mod:`!crypt` module, wrapper to the :manpage:`crypt_r(3)` library call and associated functionality. + * :pypi:`standard-crypt` and :pypi:`deprecated-crypt-alternative`: + Use the redistribution of ``crypt`` and reimplementation of ``_crypt`` libraries from PyPI. * :mod:`!imghdr`: The :pypi:`filetype`, :pypi:`puremagic`, or :pypi:`python-magic` libraries @@ -1556,30 +1573,65 @@ and are now removed: For example, the :func:`!puremagic.what` function can be used to replace the :func:`!imghdr.what` function for all file formats that were supported by :mod:`!imghdr`. + + * :pypi:`standard-imghdr`: + Use the redistribution of ``imghdr`` library from PyPI. + * :mod:`!mailcap`: Use the :mod:`mimetypes` module instead. + + * :pypi:`standard-mailcap`: + Use the redistribution of ``mailcap`` library from PyPI. + * :mod:`!msilib` * :mod:`!nis` * :mod:`!nntplib`: Use the :pypi:`pynntp` library from PyPI instead. + + * :pypi:`standard-nntplib`: + Use the redistribution of ``nntplib`` library from PyPI. + * :mod:`!ossaudiodev`: For audio playback, use the :pypi:`pygame` library from PyPI instead. * :mod:`!pipes`: Use the :mod:`subprocess` module instead. Use :func:`shlex.quote` to replace the undocumented ``pipes.quote`` function. + + * :pypi:`standard-pipes`: + Use the redistribution of ``pipes`` library from PyPI. + * :mod:`!sndhdr`: The :pypi:`filetype`, :pypi:`puremagic`, or :pypi:`python-magic` libraries should be used as replacements. + + * :pypi:`standard-sndhdr`: + Use the redistribution of ``sndhdr`` library from PyPI. + * :mod:`!spwd`: Use the :pypi:`python-pam` library from PyPI instead. * :mod:`!sunau` + + * :pypi:`standard-sunau`: + Use the redistribution of ``sunau`` library from PyPI. + * :mod:`!telnetlib`, Use the :pypi:`telnetlib3` or :pypi:`Exscript` libraries from PyPI instead. + + * :pypi:`standard-telnetlib`: + Use the redistribution of ``telnetlib`` library from PyPI. + * :mod:`!uu`: Use the :mod:`base64` module instead, as a modern alternative. 
+ + * :pypi:`standard-uu`: + Use the redistribution of ``uu`` library from PyPI. + * :mod:`!xdrlib` + * :pypi:`standard-xdrlib`: + Use the redistribution of ``xdrlib`` library from PyPI. + (Contributed by Victor Stinner and Zachary Ware in :gh:`104773` and :gh:`104780`.) @@ -2706,6 +2758,33 @@ Changes in the C API Calling this function is redundant now that :c:func:`PyFrame_GetLocals` returns a write-through proxy for :term:`optimized scopes `. +* Python 3.13 removed many private functions. Some of them can be replaced using these + alternatives: + + * ``_PyDict_Pop()``: :c:func:`PyDict_Pop` or :c:func:`PyDict_PopString`; + * ``_PyDict_GetItemWithError()``: :c:func:`PyDict_GetItemRef`; + * ``_PyErr_WriteUnraisableMsg()``: :c:func:`PyErr_FormatUnraisable`; + * ``_PyEval_SetTrace()``: :c:func:`PyEval_SetTrace` or :c:func:`PyEval_SetTraceAllThreads`; + * ``_PyList_Extend()``: :c:func:`PyList_Extend`; + * ``_PyLong_AsInt()``: :c:func:`PyLong_AsInt`; + * ``_PyMem_RawStrdup()``: ``strdup()``; + * ``_PyMem_Strdup()``: ``strdup()``; + * ``_PyObject_ClearManagedDict()``: :c:func:`PyObject_ClearManagedDict`; + * ``_PyObject_VisitManagedDict()``: :c:func:`PyObject_VisitManagedDict`; + * ``_PyThreadState_UncheckedGet()``: :c:func:`PyThreadState_GetUnchecked()`; + * ``_PyTime_AsSecondsDouble()``: :c:func:`PyTime_AsSecondsDouble`; + * ``_PyTime_GetMonotonicClock()``: :c:func:`PyTime_Monotonic` or :c:func:`PyTime_MonotonicRaw`; + * ``_PyTime_GetPerfCounter()``: :c:func:`PyTime_PerfCounter` or :c:func:`PyTime_PerfCounterRaw`; + * ``_PyTime_GetSystemClock()``: :c:func:`PyTime_Time` or :c:func:`PyTime_TimeRaw`; + * ``_PyTime_MAX``: :c:var:`PyTime_MAX`; + * ``_PyTime_MIN``: :c:var:`PyTime_MIN`; + * ``_PyTime_t``: :c:type:`PyTime_t`; + * ``_Py_HashPointer()``: :c:func:`Py_HashPointer`; + * ``_Py_IsFinalizing()``: :c:func:`Py_IsFinalizing`. + + The `pythoncapi-compat project`_ can be used to get most of these new + functions on Python 3.12 and older. + Regression Test Changes ======================= diff --git a/Doc/whatsnew/3.14.rst b/Doc/whatsnew/3.14.rst index f365db37217e95..59c432d30a342b 100644 --- a/Doc/whatsnew/3.14.rst +++ b/Doc/whatsnew/3.14.rst @@ -67,6 +67,7 @@ Summary -- release highlights * :ref:`PEP 649: deferred evaluation of annotations ` * :ref:`PEP 741: Python Configuration C API ` +* :ref:`PEP 761: Discontinuation of PGP signatures ` New features @@ -350,6 +351,12 @@ ctypes callback functions that are passed dynamically-sized buffers. (Contributed by Rian Hunter in :gh:`112018`.) +* Complex types, :class:`~ctypes.c_float_complex`, + :class:`~ctypes.c_double_complex` and :class:`~ctypes.c_longdouble_complex`, + are now available if both the compiler and the ``libffi`` library support + complex C types. + (Contributed by Sergey B Kirpichev in :gh:`61103`). + datetime -------- @@ -554,6 +561,10 @@ os to the :mod:`os` module. (Contributed by James Roy in :gh:`127688`.) +* Add the :func:`os.readinto` function to read into a + :ref:`buffer object ` from a file descriptor. + (Contributed by Cody Maloney in :gh:`129205`.) + pathlib ------- @@ -583,13 +594,6 @@ pdb command when :mod:`pdb` is in ``inline`` mode. (Contributed by Tian Gao in :gh:`123757`.) -platform --------- - -* Add :func:`platform.invalidate_caches` to invalidate the cached results. - - (Contributed by Bénédikt Tran in :gh:`122549`.) - pickle ------ @@ -601,6 +605,15 @@ pickle of the error. (Contributed by Serhiy Storchaka in :gh:`122213`.) 
+ +platform +-------- + +* Add :func:`platform.invalidate_caches` to invalidate the cached results. + + (Contributed by Bénédikt Tran in :gh:`122549`.) + + pydoc ----- @@ -636,12 +649,26 @@ sys which only exists in specialized builds of Python, may now return objects from other interpreters than the one it's called in. +* Add :func:`sys._is_immortal` for determining if an object is :term:`immortal`. + (Contributed by Peter Bierma in :gh:`128509`.) + +* On FreeBSD, :data:`sys.platform` doesn't contain the major version anymore. + It is always ``'freebsd'``, instead of ``'freebsd13'`` or ``'freebsd14'``. + + sys.monitoring -------------- * Two new events are added: :monitoring-event:`BRANCH_LEFT` and :monitoring-event:`BRANCH_RIGHT`. The ``BRANCH`` event is deprecated. +threading +--------- + +* :meth:`threading.Thread.start` now sets the operating system thread name + to :attr:`threading.Thread.name`. + (Contributed by Victor Stinner in :gh:`59705`.) + tkinter ------- @@ -650,6 +677,14 @@ tkinter (Contributed by Zhikang Yan in :gh:`126899`.) +turtle +------ + +* Add context managers for :func:`turtle.fill`, :func:`turtle.poly` + and :func:`turtle.no_animation`. + (Contributed by Marie Roald and Yngve Mardal Moe in :gh:`126350`.) + + unicodedata ----------- @@ -670,6 +705,23 @@ unittest directory again. It was removed in Python 3.11. (Contributed by Jacob Walls in :gh:`80958`.) +* A number of new methods were added in the :class:`~unittest.TestCase` class + that provide more specialized tests. + + - :meth:`~unittest.TestCase.assertHasAttr` and + :meth:`~unittest.TestCase.assertNotHasAttr` check whether the object + has a particular attribute. + - :meth:`~unittest.TestCase.assertIsSubclass` and + :meth:`~unittest.TestCase.assertNotIsSubclass` check whether the object + is a subclass of a particular class, or of one of a tuple of classes. + - :meth:`~unittest.TestCase.assertStartsWith`, + :meth:`~unittest.TestCase.assertNotStartsWith`, + :meth:`~unittest.TestCase.assertEndsWith` and + :meth:`~unittest.TestCase.assertNotEndsWith` check whether the Unicode + or byte string starts or ends with particular string(s). + + (Contributed by Serhiy Storchaka in :gh:`71339`.) + urllib ------ @@ -686,6 +738,11 @@ uuid in :rfc:`9562`. (Contributed by Bénédikt Tran in :gh:`89083`.) +* :const:`uuid.NIL` and :const:`uuid.MAX` are now available to represent the + Nil and Max UUID formats as defined by :rfc:`9562`. + (Contributed by Nick Pope in :gh:`128427`.) + + zipinfo ------- @@ -695,6 +752,12 @@ zipinfo (Contributed by Bénédikt Tran in :gh:`123424`.) +* :meth:`zipfile.ZipFile.writestr` now respect ``SOURCE_DATE_EPOCH`` that + distributions can set centrally and have build tools consume this in order + to produce reproducible output. + + (Contributed by Jiahao Li in :gh:`91279`.) + .. Add improved modules above alphabetically, not here at the end. Optimizations @@ -708,6 +771,20 @@ asyncio reduces memory usage. (Contributed by Kumar Aditya in :gh:`107803`.) +* :mod:`asyncio` has new utility functions for introspecting and printing + the program's call graph: :func:`asyncio.capture_call_graph` and + :func:`asyncio.print_call_graph`. + (Contributed by Yury Selivanov, Pablo Galindo Salgado, and Åukasz Langa + in :gh:`91048`.) + +base64 +------ + +* Improve the performance of :func:`base64.b16decode` by up to ten times, + and reduce the import time of :mod:`base64` by up to six times. + (Contributed by Bénédikt Tran, Chris Markiewicz, and Adam Turner in :gh:`118761`.) 
+ + io --- * :mod:`io` which provides the built-in :func:`open` makes less system calls @@ -717,6 +794,22 @@ io file's bytes in full. (Contributed by Cody Maloney and Victor Stinner in :gh:`120754` and :gh:`90102`.) + +uuid +---- + +* Improve generation of :class:`~uuid.UUID` objects via their dedicated + functions: + + * :func:`~uuid.uuid3` and :func:`~uuid.uuid5` are both roughly 40% faster + for 16-byte names and 20% faster for 1024-byte names. Performance for + longer names remains unchanged. + * :func:`~uuid.uuid4` and :func:`~uuid.uuid8` are 30% and 40% faster + respectively. + + (Contributed by Bénédikt Tran in :gh:`128150`.) + + Deprecated ========== @@ -1090,9 +1183,11 @@ Changes in the Python API Build changes ============= -* GNU Autoconf 2.72 is now required to generate :file:`!configure`. +* GNU Autoconf 2.72 is now required to generate :file:`configure`. (Contributed by Erlend Aasland in :gh:`115765`.) +.. _whatsnew314-pep761: + PEP 761: Discontinuation of PGP signatures ------------------------------------------ @@ -1135,16 +1230,6 @@ New features which has an ambiguous return value. (Contributed by Irit Katriel and Erlend Aasland in :gh:`105201`.) -* :c:func:`Py_Finalize` now deletes all interned strings. This - is backwards incompatible to any C-Extension that holds onto an interned - string after a call to :c:func:`Py_Finalize` and is then reused after a - call to :c:func:`Py_Initialize`. Any issues arising from this behavior will - normally result in crashes during the execution of the subsequent call to - :c:func:`Py_Initialize` from accessing uninitialized memory. To fix, use - an address sanitizer to identify any use-after-free coming from - an interned string and deallocate it during module shutdown. - (Contributed by Eddie Elizondo in :gh:`113601`.) - * Add :c:func:`PyLong_IsPositive`, :c:func:`PyLong_IsNegative` and :c:func:`PyLong_IsZero` for checking if :c:type:`PyLongObject` is positive, negative, or zero, respectively. @@ -1209,7 +1294,7 @@ New features * :c:func:`PyLongWriter_Finish`; * :c:func:`PyLongWriter_Discard`. - (Contributed by Victor Stinner in :gh:`102471`.) + (Contributed by Sergey B Kirpichev and Victor Stinner in :gh:`102471`.) * Add :c:func:`PyType_GetBaseByToken` and :c:data:`Py_tp_token` slot for easier superclass identification, which attempts to resolve the `type checking issue @@ -1220,32 +1305,92 @@ New features test if two strings are equal. (Contributed by Victor Stinner in :gh:`124502`.) - * Add :c:func:`PyType_Freeze` function to make a type immutable. (Contributed by Victor Stinner in :gh:`121654`.) * Add :c:func:`PyUnstable_Object_EnableDeferredRefcount` for enabling deferred reference counting, as outlined in :pep:`703`. -* The :ref:`Unicode Exception Objects ` C API - now raises a :exc:`TypeError` if its exception argument is not - a :exc:`UnicodeError` object. - (Contributed by Bénédikt Tran in :gh:`127691`.) - * Add :c:func:`PyMonitoring_FireBranchLeftEvent` and :c:func:`PyMonitoring_FireBranchRightEvent` for generating :monitoring-event:`BRANCH_LEFT` and :monitoring-event:`BRANCH_RIGHT` events, respectively. +* Add :c:func:`Py_fopen` function to open a file. Similar to the + :c:func:`!fopen` function, but the *path* parameter is a Python object and an + exception is set on error. Add also :c:func:`Py_fclose` function to close a + file. + (Contributed by Victor Stinner in :gh:`127350`.) 
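A minimal caller-side sketch of the new pair (the prototypes themselves are
added in the ``Include/cpython/fileutils.h`` hunk later in this patch; the
helper below is illustrative only, with *path_obj* a Python ``str`` naming
the file)::

   #include <Python.h>

   /* Return the first byte of the named file (or -1 if it is empty),
      or NULL with an exception set on error. */
   static PyObject *
   read_first_byte(PyObject *path_obj)
   {
       FILE *fp = Py_fopen(path_obj, "rb");
       if (fp == NULL) {
           return NULL;             /* Py_fopen() has set an exception */
       }
       int c = fgetc(fp);
       if (Py_fclose(fp) != 0) {
           return NULL;             /* Py_fclose() sets an exception on error */
       }
       return PyLong_FromLong(c == EOF ? -1 : c);
   }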
-Porting to Python 3.14 ----------------------- +* Add macros :c:func:`Py_PACK_VERSION` and :c:func:`Py_PACK_FULL_VERSION` for + bit-packing Python version numbers. + (Contributed by Petr Viktorin in :gh:`128629`.) + +* Add :c:func:`PyUnstable_IsImmortal` for determining whether an object is :term:`immortal`, + for debugging purposes. + +* Add :c:func:`PyImport_ImportModuleAttr` and + :c:func:`PyImport_ImportModuleAttrString` helper functions to import a module + and get an attribute of the module. + (Contributed by Victor Stinner in :gh:`128911`.) + + +Limited C API changes +--------------------- * In the limited C API 3.14 and newer, :c:func:`Py_TYPE` and :c:func:`Py_REFCNT` are now implemented as an opaque function call to hide implementation details. (Contributed by Victor Stinner in :gh:`120600` and :gh:`124127`.) +* Remove :c:func:`PySequence_Fast` from the limited C API, since this function + has to be used with :c:macro:`PySequence_Fast_GET_ITEM` which never worked + in the limited C API. + (Contributed by Victor Stinner in :gh:`91417`.) + + +Porting to Python 3.14 +---------------------- + +* :c:func:`Py_Finalize` now deletes all interned strings. This + is backwards incompatible to any C-Extension that holds onto an interned + string after a call to :c:func:`Py_Finalize` and is then reused after a + call to :c:func:`Py_Initialize`. Any issues arising from this behavior will + normally result in crashes during the execution of the subsequent call to + :c:func:`Py_Initialize` from accessing uninitialized memory. To fix, use + an address sanitizer to identify any use-after-free coming from + an interned string and deallocate it during module shutdown. + (Contributed by Eddie Elizondo in :gh:`113601`.) + +* The :ref:`Unicode Exception Objects ` C API + now raises a :exc:`TypeError` if its exception argument is not + a :exc:`UnicodeError` object. + (Contributed by Bénédikt Tran in :gh:`127691`.) + +* Private functions promoted to public C APIs: + + * ``_PyBytes_Join()``: :c:func:`PyBytes_Join`; + * ``_PyLong_IsNegative()``: :c:func:`PyLong_IsNegative`; + * ``_PyLong_IsPositive()``: :c:func:`PyLong_IsPositive`; + * ``_PyLong_IsZero()``: :c:func:`PyLong_IsZero`; + * ``_PyLong_Sign()``: :c:func:`PyLong_GetSign`; + * ``_PyUnicodeWriter_Dealloc()``: :c:func:`PyUnicodeWriter_Discard`; + * ``_PyUnicodeWriter_Finish()``: :c:func:`PyUnicodeWriter_Finish`; + * ``_PyUnicodeWriter_Init()``: :c:func:`PyUnicodeWriter_Create`; + * ``_PyUnicodeWriter_WriteChar()``: :c:func:`PyUnicodeWriter_WriteChar`; + * ``_PyUnicodeWriter_WriteStr()``: :c:func:`PyUnicodeWriter_WriteStr`; + * ``_PyUnicodeWriter_WriteSubstring()``: :c:func:`PyUnicodeWriter_WriteSubstring`; + * ``_PyUnicode_EQ()``: :c:func:`PyUnicode_Equal`; + * ``_PyUnicode_Equal()``: :c:func:`PyUnicode_Equal`; + * ``_Py_GetConfig()``: :c:func:`PyConfig_Get` and :c:func:`PyConfig_GetInt`; + * ``_Py_HashBytes()``: :c:func:`Py_HashBuffer`; + * ``_Py_fopen_obj()``: :c:func:`Py_fopen`. + + The `pythoncapi-compat project`_ can be used to get most of these new + functions on Python 3.13 and older. + +.. _pythoncapi-compat project: https://github.com/python/pythoncapi-compat/ + Deprecated ---------- @@ -1266,16 +1411,47 @@ Deprecated .. Add C API deprecations above alphabetically, not here at the end. +* The ``PyMonitoring_FireBranchEvent`` function is deprecated and should + be replaced with calls to :c:func:`PyMonitoring_FireBranchLeftEvent` + and :c:func:`PyMonitoring_FireBranchRightEvent`. 
+ +* The following private functions are deprecated and planned for removal in + Python 3.18: + + * :c:func:`!_PyBytes_Join`: use :c:func:`PyBytes_Join`. + * :c:func:`!_PyDict_GetItemStringWithError`: use :c:func:`PyDict_GetItemStringRef`. + * :c:func:`!_PyDict_Pop()`: use :c:func:`PyDict_Pop`. + * :c:func:`!_PyLong_Sign()`: use :c:func:`PyLong_GetSign`. + * :c:func:`!_PyLong_FromDigits` and :c:func:`!_PyLong_New`: + use :c:func:`PyLongWriter_Create`. + * :c:func:`!_PyThreadState_UncheckedGet`: use :c:func:`PyThreadState_GetUnchecked`. + * :c:func:`!_PyUnicode_AsString`: use :c:func:`PyUnicode_AsUTF8`. + * :c:func:`!_Py_HashPointer`: use :c:func:`Py_HashPointer`. + * :c:func:`!_Py_fopen_obj`: use :c:func:`Py_fopen`. + + The `pythoncapi-compat project`_ can be used to get these new public + functions on Python 3.13 and older. + + (Contributed by Victor Stinner in :gh:`128863`.) + + .. include:: ../deprecations/c-api-pending-removal-in-3.15.rst +.. include:: ../deprecations/c-api-pending-removal-in-3.18.rst + .. include:: ../deprecations/c-api-pending-removal-in-future.rst -* The ``PyMonitoring_FireBranchEvent`` function is deprecated and should - be replaced with calls to :c:func:`PyMonitoring_FireBranchLeftEvent` - and :c:func:`PyMonitoring_FireBranchRightEvent`. Removed ------- * Creating :c:data:`immutable types ` with mutable bases was deprecated since 3.12 and now raises a :exc:`TypeError`. + +* Remove ``PyDictObject.ma_version_tag`` member which was deprecated since + Python 3.12. Use the :c:func:`PyDict_AddWatcher` API instead. + (Contributed by Sam Gross in :gh:`124296`.) + +* Remove the private ``_Py_InitializeMain()`` function. It was a + :term:`provisional API` added to Python 3.8 by :pep:`587`. + (Contributed by Victor Stinner in :gh:`129033`.) diff --git a/Doc/whatsnew/changelog.rst b/Doc/whatsnew/changelog.rst index b4356143659031..e796d4157cec76 100644 --- a/Doc/whatsnew/changelog.rst +++ b/Doc/whatsnew/changelog.rst @@ -1,5 +1,7 @@ .. _changelog: +.. default-role:: py:obj + +++++++++ Changelog +++++++++ diff --git a/Include/abstract.h b/Include/abstract.h index 7cfee1332ccaa4..4efe4fcb014903 100644 --- a/Include/abstract.h +++ b/Include/abstract.h @@ -726,31 +726,6 @@ PyAPI_FUNC(PyObject *) PySequence_Tuple(PyObject *o); This is equivalent to the Python expression: list(o) */ PyAPI_FUNC(PyObject *) PySequence_List(PyObject *o); -/* Return the sequence 'o' as a list, unless it's already a tuple or list. - - Use PySequence_Fast_GET_ITEM to access the members of this list, and - PySequence_Fast_GET_SIZE to get its length. - - Returns NULL on failure. If the object does not support iteration, raises a - TypeError exception with 'm' as the message text. */ -PyAPI_FUNC(PyObject *) PySequence_Fast(PyObject *o, const char* m); - -/* Return the size of the sequence 'o', assuming that 'o' was returned by - PySequence_Fast and is not NULL. */ -#define PySequence_Fast_GET_SIZE(o) \ - (PyList_Check(o) ? PyList_GET_SIZE(o) : PyTuple_GET_SIZE(o)) - -/* Return the 'i'-th element of the sequence 'o', assuming that o was returned - by PySequence_Fast, and that i is within bounds. */ -#define PySequence_Fast_GET_ITEM(o, i)\ - (PyList_Check(o) ? PyList_GET_ITEM((o), (i)) : PyTuple_GET_ITEM((o), (i))) - -/* Return a pointer to the underlying item array for - an object returned by PySequence_Fast */ -#define PySequence_Fast_ITEMS(sf) \ - (PyList_Check(sf) ? 
((PyListObject *)(sf))->ob_item \ - : ((PyTupleObject *)(sf))->ob_item) - /* Return the number of occurrences on value on 'o', that is, return the number of keys for which o[key] == value. diff --git a/Include/cpython/abstract.h b/Include/cpython/abstract.h index 4e7b7a46703a6d..8fed1d3110988b 100644 --- a/Include/cpython/abstract.h +++ b/Include/cpython/abstract.h @@ -85,3 +85,29 @@ PyAPI_FUNC(Py_ssize_t) PyObject_LengthHint(PyObject *o, Py_ssize_t); need to be corrected for a negative index. */ #define PySequence_ITEM(o, i)\ ( Py_TYPE(o)->tp_as_sequence->sq_item((o), (i)) ) + +/* Return the sequence 'o' as a list, unless it's already a tuple or list. + + Use PySequence_Fast_GET_ITEM to access the members of this list, and + PySequence_Fast_GET_SIZE to get its length. + + Returns NULL on failure. If the object does not support iteration, raises a + TypeError exception with 'm' as the message text. */ +PyAPI_FUNC(PyObject *) PySequence_Fast(PyObject *o, const char* m); + +/* Return the size of the sequence 'o', assuming that 'o' was returned by + PySequence_Fast and is not NULL. */ +#define PySequence_Fast_GET_SIZE(o) \ + (PyList_Check(o) ? PyList_GET_SIZE(o) : PyTuple_GET_SIZE(o)) + +/* Return the 'i'-th element of the sequence 'o', assuming that o was returned + by PySequence_Fast, and that i is within bounds. */ +#define PySequence_Fast_GET_ITEM(o, i)\ + (PyList_Check(o) ? PyList_GET_ITEM((o), (i)) : PyTuple_GET_ITEM((o), (i))) + +/* Return a pointer to the underlying item array for + an object returned by PySequence_Fast */ +#define PySequence_Fast_ITEMS(sf) \ + (PyList_Check(sf) ? ((PyListObject *)(sf))->ob_item \ + : ((PyTupleObject *)(sf))->ob_item) + diff --git a/Include/cpython/bytesobject.h b/Include/cpython/bytesobject.h index cf3f0387ecf323..71c133f173f157 100644 --- a/Include/cpython/bytesobject.h +++ b/Include/cpython/bytesobject.h @@ -34,5 +34,9 @@ static inline Py_ssize_t PyBytes_GET_SIZE(PyObject *op) { PyAPI_FUNC(PyObject*) PyBytes_Join(PyObject *sep, PyObject *iterable); -// Alias kept for backward compatibility -#define _PyBytes_Join PyBytes_Join +// Deprecated alias kept for backward compatibility +Py_DEPRECATED(3.14) static inline PyObject* +_PyBytes_Join(PyObject *sep, PyObject *iterable) +{ + return PyBytes_Join(sep, iterable); +} diff --git a/Include/cpython/code.h b/Include/cpython/code.h index cb6261ddde941b..2bd3e08631f0ad 100644 --- a/Include/cpython/code.h +++ b/Include/cpython/code.h @@ -35,11 +35,12 @@ typedef struct { } _PyCoCached; /* Ancillary data structure used for instrumentation. - Line instrumentation creates an array of - these. One entry per code unit.*/ + Line instrumentation creates this with sufficient + space for one entry per code unit. 
The total size + of the data will be `bytes_per_entry * Py_SIZE(code)` */ typedef struct { - uint8_t original_opcode; - int8_t line_delta; + uint8_t bytes_per_entry; + uint8_t data[1]; } _PyCoLineInstrumentationData; diff --git a/Include/cpython/dictobject.h b/Include/cpython/dictobject.h index 78473e54898fa5..df9ec7050fca1a 100644 --- a/Include/cpython/dictobject.h +++ b/Include/cpython/dictobject.h @@ -68,7 +68,12 @@ PyAPI_FUNC(PyObject *) _PyDict_NewPresized(Py_ssize_t minused); PyAPI_FUNC(int) PyDict_Pop(PyObject *dict, PyObject *key, PyObject **result); PyAPI_FUNC(int) PyDict_PopString(PyObject *dict, const char *key, PyObject **result); -PyAPI_FUNC(PyObject *) _PyDict_Pop(PyObject *dict, PyObject *key, PyObject *default_value); + +// Use PyDict_Pop() instead +Py_DEPRECATED(3.14) PyAPI_FUNC(PyObject *) _PyDict_Pop( + PyObject *dict, + PyObject *key, + PyObject *default_value); /* Dictionary watchers */ diff --git a/Include/cpython/fileutils.h b/Include/cpython/fileutils.h index b386ad107bde1f..626b1ad57b3846 100644 --- a/Include/cpython/fileutils.h +++ b/Include/cpython/fileutils.h @@ -2,7 +2,15 @@ # error "this header file must not be included directly" #endif -// Used by _testcapi which must not use the internal C API -PyAPI_FUNC(FILE*) _Py_fopen_obj( +PyAPI_FUNC(FILE*) Py_fopen( PyObject *path, const char *mode); + +// Deprecated alias kept for backward compatibility +Py_DEPRECATED(3.14) static inline FILE* +_Py_fopen_obj(PyObject *path, const char *mode) +{ + return Py_fopen(path, mode); +} + +PyAPI_FUNC(int) Py_fclose(FILE *file); diff --git a/Include/cpython/import.h b/Include/cpython/import.h index 7daf0b84fcf71b..0ce0b1ee6cce2a 100644 --- a/Include/cpython/import.h +++ b/Include/cpython/import.h @@ -2,8 +2,6 @@ # error "this header file must not be included directly" #endif -PyMODINIT_FUNC PyInit__imp(void); - struct _inittab { const char *name; /* ASCII encoded string */ PyObject* (*initfunc)(void); @@ -23,3 +21,10 @@ struct _frozen { collection of frozen modules: */ PyAPI_DATA(const struct _frozen *) PyImport_FrozenModules; + +PyAPI_FUNC(PyObject*) PyImport_ImportModuleAttr( + PyObject *mod_name, + PyObject *attr_name); +PyAPI_FUNC(PyObject*) PyImport_ImportModuleAttrString( + const char *mod_name, + const char *attr_name); diff --git a/Include/cpython/longintrepr.h b/Include/cpython/longintrepr.h index 357477b60d9a5a..4b6f97a5e475d6 100644 --- a/Include/cpython/longintrepr.h +++ b/Include/cpython/longintrepr.h @@ -76,8 +76,8 @@ typedef long stwodigits; /* signed variant of twodigits */ - 1: Zero - 2: Negative - The third lowest bit of lv_tag is reserved for an immortality flag, but is - not currently used. + The third lowest bit of lv_tag is + set to 1 for the small ints. In a normalized number, ob_digit[ndigits-1] (the most significant digit) is never zero. Also, in all cases, for all valid i, @@ -100,12 +100,12 @@ struct _longobject { _PyLongValue long_value; }; -PyAPI_FUNC(PyLongObject*) _PyLong_New(Py_ssize_t); +Py_DEPRECATED(3.14) PyAPI_FUNC(PyLongObject*) _PyLong_New(Py_ssize_t); // Return a copy of src. 
PyAPI_FUNC(PyObject*) _PyLong_Copy(PyLongObject *src); -PyAPI_FUNC(PyLongObject*) _PyLong_FromDigits( +Py_DEPRECATED(3.14) PyAPI_FUNC(PyLongObject*) _PyLong_FromDigits( int negative, Py_ssize_t digit_count, digit *digits); diff --git a/Include/cpython/longobject.h b/Include/cpython/longobject.h index 4d6e618f831ad8..7f28ad60b7467b 100644 --- a/Include/cpython/longobject.h +++ b/Include/cpython/longobject.h @@ -86,7 +86,7 @@ PyAPI_FUNC(int) PyLong_IsZero(PyObject *obj); - On failure, set an exception, and return -1. */ PyAPI_FUNC(int) PyLong_GetSign(PyObject *v, int *sign); -PyAPI_FUNC(int) _PyLong_Sign(PyObject *v); +Py_DEPRECATED(3.14) PyAPI_FUNC(int) _PyLong_Sign(PyObject *v); /* _PyLong_NumBits. Return the number of bits needed to represent the absolute value of a long. For example, this returns 1 for 1 and -1, 2 diff --git a/Include/cpython/object.h b/Include/cpython/object.h index e4797029da431e..71bd01884426ad 100644 --- a/Include/cpython/object.h +++ b/Include/cpython/object.h @@ -221,7 +221,9 @@ struct _typeobject { PyObject *tp_weaklist; /* not used for static builtin types */ destructor tp_del; - /* Type attribute cache version tag. Added in version 2.6 */ + /* Type attribute cache version tag. Added in version 2.6. + * If zero, the cache is invalid and must be initialized. + */ unsigned int tp_version_tag; destructor tp_finalize; @@ -229,9 +231,17 @@ struct _typeobject { /* bitset of which type-watchers care about this type */ unsigned char tp_watched; + + /* Number of tp_version_tag values used. + * Set to _Py_ATTR_CACHE_UNUSED if the attribute cache is + * disabled for this type (e.g. due to custom MRO entries). + * Otherwise, limited to MAX_VERSIONS_PER_CLASS (defined elsewhere). + */ uint16_t tp_versions_used; }; +#define _Py_ATTR_CACHE_UNUSED (30000) // (see tp_versions_used) + /* This struct is used by the specializer * It should be treated as an opaque blob * by code other than the specializer and interpreter. */ @@ -465,9 +475,6 @@ partially-deallocated object. To check this, the tp_dealloc function must be passed as second argument to Py_TRASHCAN_BEGIN(). */ -/* Python 3.9 private API, invoked by the macros below. */ -PyAPI_FUNC(int) _PyTrash_begin(PyThreadState *tstate, PyObject *op); -PyAPI_FUNC(void) _PyTrash_end(PyThreadState *tstate); PyAPI_FUNC(void) _PyTrash_thread_deposit_object(PyThreadState *tstate, PyObject *op); PyAPI_FUNC(void) _PyTrash_thread_destroy_chain(PyThreadState *tstate); @@ -534,3 +541,12 @@ PyAPI_FUNC(PyRefTracer) PyRefTracer_GetTracer(void**); * 0 if the runtime ignored it. This function cannot fail. */ PyAPI_FUNC(int) PyUnstable_Object_EnableDeferredRefcount(PyObject *); + +/* Check whether the object is immortal. This cannot fail. */ +PyAPI_FUNC(int) PyUnstable_IsImmortal(PyObject *); + +// Increments the reference count of the object, if it's not zero. +// PyUnstable_EnableTryIncRef() should be called on the object +// before calling this function in order to avoid spurious failures. 
+PyAPI_FUNC(int) PyUnstable_TryIncRef(PyObject *); +PyAPI_FUNC(void) PyUnstable_EnableTryIncRef(PyObject *); diff --git a/Include/cpython/pyatomic.h b/Include/cpython/pyatomic.h index 6d106c1b499c69..2a0c11e7b3ad66 100644 --- a/Include/cpython/pyatomic.h +++ b/Include/cpython/pyatomic.h @@ -574,15 +574,15 @@ static inline void _Py_atomic_fence_release(void); #if _Py_USE_GCC_BUILTIN_ATOMICS # define Py_ATOMIC_GCC_H -# include "cpython/pyatomic_gcc.h" +# include "pyatomic_gcc.h" # undef Py_ATOMIC_GCC_H #elif __STDC_VERSION__ >= 201112L && !defined(__STDC_NO_ATOMICS__) # define Py_ATOMIC_STD_H -# include "cpython/pyatomic_std.h" +# include "pyatomic_std.h" # undef Py_ATOMIC_STD_H #elif defined(_MSC_VER) # define Py_ATOMIC_MSC_H -# include "cpython/pyatomic_msc.h" +# include "pyatomic_msc.h" # undef Py_ATOMIC_MSC_H #else # error "no available pyatomic implementation for this platform/compiler" diff --git a/Include/cpython/pyerrors.h b/Include/cpython/pyerrors.h index 49a6265e5eb02f..b36b4681f5dddb 100644 --- a/Include/cpython/pyerrors.h +++ b/Include/cpython/pyerrors.h @@ -94,12 +94,6 @@ PyAPI_FUNC(void) _PyErr_ChainExceptions1(PyObject *); /* In exceptions.c */ -PyAPI_FUNC(int) _PyUnicodeError_GetParams( - PyObject *self, - PyObject **obj, Py_ssize_t *objlen, - Py_ssize_t *start, Py_ssize_t *end, - int as_bytes); - PyAPI_FUNC(PyObject*) PyUnstable_Exc_PrepReraiseStar( PyObject *orig, PyObject *excs); diff --git a/Include/cpython/pyhash.h b/Include/cpython/pyhash.h index 876a7f0ea44f4d..a33ba10b8d3a37 100644 --- a/Include/cpython/pyhash.h +++ b/Include/cpython/pyhash.h @@ -29,9 +29,6 @@ /* Helpers for hash functions */ PyAPI_FUNC(Py_hash_t) _Py_HashDouble(PyObject *, double); -// Kept for backward compatibility -#define _Py_HashPointer Py_HashPointer - /* hash function definition */ typedef struct { @@ -44,6 +41,14 @@ typedef struct { PyAPI_FUNC(PyHash_FuncDef*) PyHash_GetFuncDef(void); PyAPI_FUNC(Py_hash_t) Py_HashPointer(const void *ptr); + +// Deprecated alias kept for backward compatibility +Py_DEPRECATED(3.14) static inline Py_hash_t +_Py_HashPointer(const void *ptr) +{ + return Py_HashPointer(ptr); +} + PyAPI_FUNC(Py_hash_t) PyObject_GenericHash(PyObject *); PyAPI_FUNC(Py_hash_t) Py_HashBuffer(const void *ptr, Py_ssize_t len); diff --git a/Include/cpython/pylifecycle.h b/Include/cpython/pylifecycle.h index e46dfe59ec4630..86ce6e6f79824a 100644 --- a/Include/cpython/pylifecycle.h +++ b/Include/cpython/pylifecycle.h @@ -25,9 +25,6 @@ PyAPI_FUNC(PyStatus) Py_PreInitializeFromArgs( PyAPI_FUNC(PyStatus) Py_InitializeFromConfig( const PyConfig *config); -// Python 3.8 provisional API (PEP 587) -PyAPI_FUNC(PyStatus) _Py_InitializeMain(void); - PyAPI_FUNC(int) Py_RunMain(void); diff --git a/Include/cpython/pystate.h b/Include/cpython/pystate.h index 32f68378ea5d72..cd6d9582496850 100644 --- a/Include/cpython/pystate.h +++ b/Include/cpython/pystate.h @@ -239,8 +239,12 @@ struct _ts { * if it is NULL. */ PyAPI_FUNC(PyThreadState *) PyThreadState_GetUnchecked(void); -// Alias kept for backward compatibility -#define _PyThreadState_UncheckedGet PyThreadState_GetUnchecked +// Deprecated alias kept for backward compatibility +Py_DEPRECATED(3.14) static inline PyThreadState* +_PyThreadState_UncheckedGet(void) +{ + return PyThreadState_GetUnchecked(); +} // Disable tracing and profiling. 
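Several hunks in this patch replace backward-compatibility ``#define`` aliases
(``_PyBytes_Join``, ``_Py_fopen_obj``, ``_Py_HashPointer``,
``_PyThreadState_UncheckedGet``) with ``Py_DEPRECATED(3.14)`` ``static inline``
wrappers: existing extensions keep compiling, but the compiler now emits a
deprecation warning pointing at the new names. A minimal sketch of the intended
replacement spellings, both of which were added in Python 3.13 (the helper
function is illustrative only)::

   #include <Python.h>

   static Py_hash_t
   hash_current_thread_state(void)
   {
       /* Deprecated spellings, which still compile but now warn:
        *     PyThreadState *ts = _PyThreadState_UncheckedGet();
        *     return _Py_HashPointer(ts);
        */
       PyThreadState *ts = PyThreadState_GetUnchecked();
       return Py_HashPointer(ts);
   }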
diff --git a/Include/cpython/pystats.h b/Include/cpython/pystats.h index 29ef0c0e4d4e72..f52348e42b1330 100644 --- a/Include/cpython/pystats.h +++ b/Include/cpython/pystats.h @@ -31,7 +31,7 @@ #define PYSTATS_MAX_UOP_ID 512 -#define SPECIALIZATION_FAILURE_KINDS 36 +#define SPECIALIZATION_FAILURE_KINDS 37 /* Stats for determining who is calling PyEval_EvalFrame */ #define EVAL_CALL_TOTAL 0 @@ -141,6 +141,14 @@ typedef struct _optimization_stats { uint64_t remove_globals_builtins_changed; uint64_t remove_globals_incorrect_keys; uint64_t error_in_opcode[PYSTATS_MAX_UOP_ID + 1]; + // JIT memory stats + uint64_t jit_total_memory_size; + uint64_t jit_code_size; + uint64_t jit_trampoline_size; + uint64_t jit_data_size; + uint64_t jit_padding_size; + uint64_t jit_freed_memory_size; + uint64_t trace_total_memory_hist[_Py_UOP_HIST_SIZE]; } OptimizationStats; typedef struct _rare_event_stats { diff --git a/Include/cpython/pythread.h b/Include/cpython/pythread.h index 03f710a9f7ef2e..e658b35bd90700 100644 --- a/Include/cpython/pythread.h +++ b/Include/cpython/pythread.h @@ -22,7 +22,7 @@ PyAPI_DATA(const long long) PY_TIMEOUT_MAX; */ # define NATIVE_TSS_KEY_T unsigned long #elif defined(HAVE_PTHREAD_STUBS) -# include "cpython/pthread_stubs.h" +# include "pthread_stubs.h" # define NATIVE_TSS_KEY_T pthread_key_t #else # error "Require native threads. See https://bugs.python.org/issue31370" diff --git a/Include/cpython/unicodeobject.h b/Include/cpython/unicodeobject.h index 46a01c8e591709..cea69dd1280999 100644 --- a/Include/cpython/unicodeobject.h +++ b/Include/cpython/unicodeobject.h @@ -240,6 +240,8 @@ enum PyUnicode_Kind { PyUnicode_4BYTE_KIND = 4 }; +PyAPI_FUNC(int) PyUnicode_KIND(PyObject *op); + // PyUnicode_KIND(): Return one of the PyUnicode_*_KIND values defined above. // // gh-89653: Converting this macro to a static inline function would introduce @@ -264,13 +266,15 @@ static inline void* _PyUnicode_NONCOMPACT_DATA(PyObject *op) { return data; } -static inline void* PyUnicode_DATA(PyObject *op) { +PyAPI_FUNC(void*) PyUnicode_DATA(PyObject *op); + +static inline void* _PyUnicode_DATA(PyObject *op) { if (PyUnicode_IS_COMPACT(op)) { return _PyUnicode_COMPACT_DATA(op); } return _PyUnicode_NONCOMPACT_DATA(op); } -#define PyUnicode_DATA(op) PyUnicode_DATA(_PyObject_CAST(op)) +#define PyUnicode_DATA(op) _PyUnicode_DATA(_PyObject_CAST(op)) /* Return pointers to the canonical representation cast to unsigned char, Py_UCS2, or Py_UCS4 for direct character access. 
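The hunk above declares ``PyUnicode_KIND()`` and ``PyUnicode_DATA()`` as
exported functions while keeping the macro/inline fast path, so existing
callers compile unchanged. A generic sketch of such a caller (illustrative
only)::

   #include <Python.h>

   /* Count ASCII spaces in a str object via the kind/data accessors. */
   static Py_ssize_t
   count_spaces(PyObject *str)
   {
       int kind = PyUnicode_KIND(str);
       void *data = PyUnicode_DATA(str);
       Py_ssize_t length = PyUnicode_GET_LENGTH(str);
       Py_ssize_t count = 0;
       for (Py_ssize_t i = 0; i < length; i++) {
           if (PyUnicode_READ(kind, data, i) == ' ') {
               count++;
           }
       }
       return count;
   }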
@@ -630,8 +634,12 @@ _PyUnicodeWriter_Dealloc(_PyUnicodeWriter *writer); PyAPI_FUNC(const char *) PyUnicode_AsUTF8(PyObject *unicode); -// Alias kept for backward compatibility -#define _PyUnicode_AsString PyUnicode_AsUTF8 +// Deprecated alias kept for backward compatibility +Py_DEPRECATED(3.14) static inline const char* +_PyUnicode_AsString(PyObject *unicode) +{ + return PyUnicode_AsUTF8(unicode); +} /* === Characters Type APIs =============================================== */ diff --git a/Include/internal/pycore_ceval.h b/Include/internal/pycore_ceval.h index 80bd19a887871c..fea8665ae39ab5 100644 --- a/Include/internal/pycore_ceval.h +++ b/Include/internal/pycore_ceval.h @@ -264,7 +264,7 @@ PyAPI_DATA(const size_t) _Py_FunctionAttributeOffsets[]; PyAPI_FUNC(int) _PyEval_CheckExceptStarTypeValid(PyThreadState *tstate, PyObject* right); PyAPI_FUNC(int) _PyEval_CheckExceptTypeValid(PyThreadState *tstate, PyObject* right); -PyAPI_FUNC(int) _PyEval_ExceptionGroupMatch(PyObject* exc_value, PyObject *match_type, PyObject **match, PyObject **rest); +PyAPI_FUNC(int) _PyEval_ExceptionGroupMatch(_PyInterpreterFrame *, PyObject* exc_value, PyObject *match_type, PyObject **match, PyObject **rest); PyAPI_FUNC(void) _PyEval_FormatAwaitableError(PyThreadState *tstate, PyTypeObject *type, int oparg); PyAPI_FUNC(void) _PyEval_FormatExcCheckArg(PyThreadState *tstate, PyObject *exc, const char *format_str, PyObject *obj); PyAPI_FUNC(void) _PyEval_FormatExcUnbound(PyThreadState *tstate, PyCodeObject *co, int oparg); diff --git a/Include/internal/pycore_code.h b/Include/internal/pycore_code.h index d97fe81a2fc54a..65c3d142458577 100644 --- a/Include/internal/pycore_code.h +++ b/Include/internal/pycore_code.h @@ -100,6 +100,7 @@ typedef struct { typedef struct { _Py_BackoffCounter counter; + uint16_t external_cache[4]; } _PyBinaryOpCache; #define INLINE_CACHE_ENTRIES_BINARY_OP CACHE_ENTRIES(_PyBinaryOpCache) @@ -372,6 +373,7 @@ extern void _Py_Specialize_ContainsOp(_PyStackRef value, _Py_CODEUNIT *instr); do { if (_Py_stats && PyFunction_Check(callable)) _Py_stats->call_stats.eval_calls[name]++; } while (0) #define GC_STAT_ADD(gen, name, n) do { if (_Py_stats) _Py_stats->gc_stats[(gen)].name += (n); } while (0) #define OPT_STAT_INC(name) do { if (_Py_stats) _Py_stats->optimization_stats.name++; } while (0) +#define OPT_STAT_ADD(name, n) do { if (_Py_stats) _Py_stats->optimization_stats.name += (n); } while (0) #define UOP_STAT_INC(opname, name) do { if (_Py_stats) { assert(opname < 512); _Py_stats->optimization_stats.opcode[opname].name++; } } while (0) #define UOP_PAIR_INC(uopcode, lastuop) \ do { \ @@ -407,6 +409,7 @@ PyAPI_FUNC(PyObject*) _Py_GetSpecializationStats(void); #define EVAL_CALL_STAT_INC_IF_FUNCTION(name, callable) ((void)0) #define GC_STAT_ADD(gen, name, n) ((void)0) #define OPT_STAT_INC(name) ((void)0) +#define OPT_STAT_ADD(name, n) ((void)0) #define UOP_STAT_INC(opname, name) ((void)0) #define UOP_PAIR_INC(uopcode, lastuop) ((void)0) #define OPT_UNSUPPORTED_OPCODE(opname) ((void)0) @@ -438,7 +441,7 @@ write_u64(uint16_t *p, uint64_t val) } static inline void -write_obj(uint16_t *p, PyObject *val) +write_ptr(uint16_t *p, void *val) { memcpy(p, &val, sizeof(val)); } @@ -576,6 +579,16 @@ adaptive_counter_backoff(_Py_BackoffCounter counter) { return restart_backoff_counter(counter); } +/* Specialization Extensions */ + +/* callbacks for an external specialization */ +typedef int (*binaryopguardfunc)(PyObject *lhs, PyObject *rhs); +typedef PyObject *(*binaryopactionfunc)(PyObject *lhs, PyObject 
*rhs); + +typedef struct { + binaryopguardfunc guard; + binaryopactionfunc action; +} _PyBinaryOpSpecializationDescr; /* Comparison bit masks. */ diff --git a/Include/internal/pycore_debug_offsets.h b/Include/internal/pycore_debug_offsets.h index 184f4b9360b6d3..44feb079571a73 100644 --- a/Include/internal/pycore_debug_offsets.h +++ b/Include/internal/pycore_debug_offsets.h @@ -11,6 +11,42 @@ extern "C" { #define _Py_Debug_Cookie "xdebugpy" +#if defined(__APPLE__) +# include +#endif + +// Macros to burn global values in custom sections so out-of-process +// profilers can locate them easily. +#define GENERATE_DEBUG_SECTION(name, declaration) \ + _GENERATE_DEBUG_SECTION_WINDOWS(name) \ + _GENERATE_DEBUG_SECTION_APPLE(name) \ + declaration \ + _GENERATE_DEBUG_SECTION_LINUX(name) + +#if defined(MS_WINDOWS) +#define _GENERATE_DEBUG_SECTION_WINDOWS(name) \ + _Pragma(Py_STRINGIFY(section(Py_STRINGIFY(name), read, write))) \ + __declspec(allocate(Py_STRINGIFY(name))) +#else +#define _GENERATE_DEBUG_SECTION_WINDOWS(name) +#endif + +#if defined(__APPLE__) +#define _GENERATE_DEBUG_SECTION_APPLE(name) \ + __attribute__((section(SEG_DATA "," Py_STRINGIFY(name)))) \ + __attribute__((used)) +#else +#define _GENERATE_DEBUG_SECTION_APPLE(name) +#endif + +#if defined(__linux__) && (defined(__GNUC__) || defined(__clang__)) +#define _GENERATE_DEBUG_SECTION_LINUX(name) \ + __attribute__((section("." Py_STRINGIFY(name)))) \ + __attribute__((used)) +#else +#define _GENERATE_DEBUG_SECTION_LINUX(name) +#endif + #ifdef Py_GIL_DISABLED # define _Py_Debug_gilruntimestate_enabled offsetof(struct _gil_runtime_state, enabled) # define _Py_Debug_Free_Threaded 1 @@ -69,6 +105,7 @@ typedef struct _Py_DebugOffsets { uint64_t instr_ptr; uint64_t localsplus; uint64_t owner; + uint64_t stackpointer; } interpreter_frame; // Code object offset; @@ -113,6 +150,14 @@ typedef struct _Py_DebugOffsets { uint64_t ob_size; } list_object; + // PySet object offset; + struct _set_object { + uint64_t size; + uint64_t used; + uint64_t table; + uint64_t mask; + } set_object; + // PyDict object offset; struct _dict_object { uint64_t size; @@ -153,6 +198,14 @@ typedef struct _Py_DebugOffsets { uint64_t size; uint64_t collecting; } gc; + + // Generator object offset; + struct _gen_object { + uint64_t size; + uint64_t gi_name; + uint64_t gi_iframe; + uint64_t gi_frame_state; + } gen_object; } _Py_DebugOffsets; @@ -198,6 +251,7 @@ typedef struct _Py_DebugOffsets { .instr_ptr = offsetof(_PyInterpreterFrame, instr_ptr), \ .localsplus = offsetof(_PyInterpreterFrame, localsplus), \ .owner = offsetof(_PyInterpreterFrame, owner), \ + .stackpointer = offsetof(_PyInterpreterFrame, stackpointer), \ }, \ .code_object = { \ .size = sizeof(PyCodeObject), \ @@ -231,6 +285,12 @@ typedef struct _Py_DebugOffsets { .ob_item = offsetof(PyListObject, ob_item), \ .ob_size = offsetof(PyListObject, ob_base.ob_size), \ }, \ + .set_object = { \ + .size = sizeof(PySetObject), \ + .used = offsetof(PySetObject, used), \ + .table = offsetof(PySetObject, table), \ + .mask = offsetof(PySetObject, mask), \ + }, \ .dict_object = { \ .size = sizeof(PyDictObject), \ .ma_keys = offsetof(PyDictObject, ma_keys), \ @@ -260,6 +320,12 @@ typedef struct _Py_DebugOffsets { .size = sizeof(struct _gc_runtime_state), \ .collecting = offsetof(struct _gc_runtime_state, collecting), \ }, \ + .gen_object = { \ + .size = sizeof(PyGenObject), \ + .gi_name = offsetof(PyGenObject, gi_name), \ + .gi_iframe = offsetof(PyGenObject, gi_iframe), \ + .gi_frame_state = offsetof(PyGenObject, 
gi_frame_state), \ + }, \ } diff --git a/Include/internal/pycore_dict.h b/Include/internal/pycore_dict.h index 71927006d1cd48..f4c55ca6cf64d2 100644 --- a/Include/internal/pycore_dict.h +++ b/Include/internal/pycore_dict.h @@ -114,6 +114,16 @@ extern Py_ssize_t _Py_dict_lookup_threadsafe_stackref(PyDictObject *mp, PyObject extern Py_ssize_t _PyDict_LookupIndex(PyDictObject *, PyObject *); extern Py_ssize_t _PyDictKeys_StringLookup(PyDictKeysObject* dictkeys, PyObject *key); + +/* Look up a string key in an all unicode dict keys, assign the keys object a version, and + * store it in version. + * + * Returns DKIX_ERROR if key is not a string or if the keys object is not all + * strings. + * + * Returns DKIX_EMPTY if the key is not present. + */ +extern Py_ssize_t _PyDictKeys_StringLookupAndVersion(PyDictKeysObject* dictkeys, PyObject *key, uint32_t *version); extern Py_ssize_t _PyDictKeys_StringLookupSplit(PyDictKeysObject* dictkeys, PyObject *key); PyAPI_FUNC(PyObject *)_PyDict_LoadGlobal(PyDictObject *, PyDictObject *, PyObject *); PyAPI_FUNC(void) _PyDict_LoadGlobalStackRef(PyDictObject *, PyDictObject *, PyObject *, _PyStackRef *); @@ -337,8 +347,7 @@ PyDictObject *_PyObject_MaterializeManagedDict_LockHeld(PyObject *); static inline Py_ssize_t _PyDict_UniqueId(PyDictObject *mp) { - // Offset by one so that _ma_watcher_tag=0 represents an unassigned id - return (Py_ssize_t)(mp->_ma_watcher_tag >> DICT_UNIQUE_ID_SHIFT) - 1; + return (Py_ssize_t)(mp->_ma_watcher_tag >> DICT_UNIQUE_ID_SHIFT); } static inline void diff --git a/Include/internal/pycore_emscripten_trampoline.h b/Include/internal/pycore_emscripten_trampoline.h index e519c99ad86cce..5546ebbbfcb5c1 100644 --- a/Include/internal/pycore_emscripten_trampoline.h +++ b/Include/internal/pycore_emscripten_trampoline.h @@ -27,24 +27,14 @@ #if defined(__EMSCRIPTEN__) && defined(PY_CALL_TRAMPOLINE) -void _Py_EmscriptenTrampoline_Init(_PyRuntimeState *runtime); +void +_Py_EmscriptenTrampoline_Init(_PyRuntimeState *runtime); PyObject* -_PyEM_TrampolineCall_JavaScript(PyCFunctionWithKeywords func, - PyObject* self, - PyObject* args, - PyObject* kw); - -PyObject* -_PyEM_TrampolineCall_Reflection(PyCFunctionWithKeywords func, - PyObject* self, - PyObject* args, - PyObject* kw); - -#define _PyEM_TrampolineCall(meth, self, args, kw) \ - ((_PyRuntime.wasm_type_reflection_available) ? 
\ - (_PyEM_TrampolineCall_Reflection((PyCFunctionWithKeywords)(meth), (self), (args), (kw))) : \ - (_PyEM_TrampolineCall_JavaScript((PyCFunctionWithKeywords)(meth), (self), (args), (kw)))) +_PyEM_TrampolineCall(PyCFunctionWithKeywords func, + PyObject* self, + PyObject* args, + PyObject* kw); #define _PyCFunction_TrampolineCall(meth, self, args) \ _PyEM_TrampolineCall( \ @@ -62,8 +52,6 @@ _PyEM_TrampolineCall_Reflection(PyCFunctionWithKeywords func, #else // defined(__EMSCRIPTEN__) && defined(PY_CALL_TRAMPOLINE) -#define _Py_EmscriptenTrampoline_Init(runtime) - #define _PyCFunction_TrampolineCall(meth, self, args) \ (meth)((self), (args)) diff --git a/Include/internal/pycore_frame.h b/Include/internal/pycore_frame.h index 96ae4dd22ecb43..8cc3504723b64c 100644 --- a/Include/internal/pycore_frame.h +++ b/Include/internal/pycore_frame.h @@ -56,7 +56,8 @@ enum _frameowner { FRAME_OWNED_BY_THREAD = 0, FRAME_OWNED_BY_GENERATOR = 1, FRAME_OWNED_BY_FRAME_OBJECT = 2, - FRAME_OWNED_BY_CSTACK = 3, + FRAME_OWNED_BY_INTERPRETER = 3, + FRAME_OWNED_BY_CSTACK = 4, }; typedef struct _PyInterpreterFrame { @@ -68,14 +69,19 @@ typedef struct _PyInterpreterFrame { PyObject *f_locals; /* Strong reference, may be NULL. Only valid if not on C stack */ PyFrameObject *frame_obj; /* Strong reference, may be NULL. Only valid if not on C stack */ _Py_CODEUNIT *instr_ptr; /* Instruction currently executing (or about to begin) */ + _PyStackRef *stackpointer; #ifdef Py_GIL_DISABLED /* Index of thread-local bytecode containing instr_ptr. */ int32_t tlbc_index; #endif - _PyStackRef *stackpointer; uint16_t return_offset; /* Only relevant during a function call */ char owner; - char visited; +#ifdef Py_DEBUG + uint8_t visited:1; + uint8_t lltrace:7; +#else + uint8_t visited; +#endif /* Locals and stack */ _PyStackRef localsplus[1]; } _PyInterpreterFrame; @@ -153,13 +159,6 @@ static inline void _PyFrame_Copy(_PyInterpreterFrame *src, _PyInterpreterFrame * // Don't leave a dangling pointer to the old frame when creating generators // and coroutines: dest->previous = NULL; - -#ifdef Py_GIL_DISABLED - PyCodeObject *co = _PyFrame_GetCode(dest); - for (int i = stacktop; i < co->co_nlocalsplus + co->co_stacksize; i++) { - dest->localsplus[i] = PyStackRef_NULL; - } -#endif } #ifdef Py_GIL_DISABLED @@ -209,20 +208,13 @@ _PyFrame_Initialize( frame->return_offset = 0; frame->owner = FRAME_OWNED_BY_THREAD; frame->visited = 0; +#ifdef Py_DEBUG + frame->lltrace = 0; +#endif for (int i = null_locals_from; i < code->co_nlocalsplus; i++) { frame->localsplus[i] = PyStackRef_NULL; } - -#ifdef Py_GIL_DISABLED - // On GIL disabled, we walk the entire stack in GC. Since stacktop - // is not always in sync with the real stack pointer, we have - // no choice but to traverse the entire stack. - // This just makes sure we don't pass the GC invalid stack values. 
- for (int i = code->co_nlocalsplus; i < code->co_nlocalsplus + code->co_stacksize; i++) { - frame->localsplus[i] = PyStackRef_NULL; - } -#endif } /* Gets the pointer to the locals array @@ -264,7 +256,7 @@ _PyFrame_SetStackPointer(_PyInterpreterFrame *frame, _PyStackRef *stack_pointer) static inline bool _PyFrame_IsIncomplete(_PyInterpreterFrame *frame) { - if (frame->owner == FRAME_OWNED_BY_CSTACK) { + if (frame->owner >= FRAME_OWNED_BY_INTERPRETER) { return true; } return frame->owner != FRAME_OWNED_BY_GENERATOR && @@ -392,14 +384,10 @@ _PyFrame_PushTrampolineUnchecked(PyThreadState *tstate, PyCodeObject *code, int #endif frame->owner = FRAME_OWNED_BY_THREAD; frame->visited = 0; - frame->return_offset = 0; - -#ifdef Py_GIL_DISABLED - assert(code->co_nlocalsplus == 0); - for (int i = 0; i < code->co_stacksize; i++) { - frame->localsplus[i] = PyStackRef_NULL; - } +#ifdef Py_DEBUG + frame->lltrace = 0; #endif + frame->return_offset = 0; return frame; } diff --git a/Include/internal/pycore_freelist_state.h b/Include/internal/pycore_freelist_state.h index a1a94c1f2dc880..7c252f5b570c13 100644 --- a/Include/internal/pycore_freelist_state.h +++ b/Include/internal/pycore_freelist_state.h @@ -11,6 +11,8 @@ extern "C" { # define PyTuple_MAXSAVESIZE 20 // Largest tuple to save on freelist # define Py_tuple_MAXFREELIST 2000 // Maximum number of tuples of each size to save # define Py_lists_MAXFREELIST 80 +# define Py_list_iters_MAXFREELIST 10 +# define Py_tuple_iters_MAXFREELIST 10 # define Py_dicts_MAXFREELIST 80 # define Py_dictkeys_MAXFREELIST 80 # define Py_floats_MAXFREELIST 100 @@ -22,6 +24,7 @@ extern "C" { # define Py_futureiters_MAXFREELIST 255 # define Py_object_stack_chunks_MAXFREELIST 4 # define Py_unicode_writers_MAXFREELIST 1 +# define Py_pymethodobjects_MAXFREELIST 20 // A generic freelist of either PyObjects or other data structures. struct _Py_freelist { @@ -39,6 +42,8 @@ struct _Py_freelists { struct _Py_freelist ints; struct _Py_freelist tuples[PyTuple_MAXSAVESIZE]; struct _Py_freelist lists; + struct _Py_freelist list_iters; + struct _Py_freelist tuple_iters; struct _Py_freelist dicts; struct _Py_freelist dictkeys; struct _Py_freelist slices; @@ -48,6 +53,7 @@ struct _Py_freelists { struct _Py_freelist futureiters; struct _Py_freelist object_stack_chunks; struct _Py_freelist unicode_writers; + struct _Py_freelist pymethodobjects; }; #ifdef __cplusplus diff --git a/Include/internal/pycore_gc.h b/Include/internal/pycore_gc.h index 4ff34bf8ead7d0..b1806df2706097 100644 --- a/Include/internal/pycore_gc.h +++ b/Include/internal/pycore_gc.h @@ -45,12 +45,13 @@ static inline PyObject* _Py_FROM_GC(PyGC_Head *gc) { * the per-object lock. */ #ifdef Py_GIL_DISABLED -# define _PyGC_BITS_TRACKED (1) // Tracked by the GC -# define _PyGC_BITS_FINALIZED (2) // tp_finalize was called -# define _PyGC_BITS_UNREACHABLE (4) -# define _PyGC_BITS_FROZEN (8) -# define _PyGC_BITS_SHARED (16) -# define _PyGC_BITS_DEFERRED (64) // Use deferred reference counting +# define _PyGC_BITS_TRACKED (1<<0) // Tracked by the GC +# define _PyGC_BITS_FINALIZED (1<<1) // tp_finalize was called +# define _PyGC_BITS_UNREACHABLE (1<<2) +# define _PyGC_BITS_FROZEN (1<<3) +# define _PyGC_BITS_SHARED (1<<4) +# define _PyGC_BITS_ALIVE (1<<5) // Reachable from a known root. +# define _PyGC_BITS_DEFERRED (1<<6) // Use deferred reference counting #endif #ifdef Py_GIL_DISABLED @@ -330,6 +331,9 @@ struct _gc_runtime_state { collections, and are awaiting to undergo a full collection for the first time. 
*/ Py_ssize_t long_lived_pending; + + /* True if gc.freeze() has been used. */ + int freeze_active; #endif }; diff --git a/Include/internal/pycore_import.h b/Include/internal/pycore_import.h index 318c712bdfa174..5fe60df0a92fbc 100644 --- a/Include/internal/pycore_import.h +++ b/Include/internal/pycore_import.h @@ -31,12 +31,6 @@ extern int _PyImport_FixupBuiltin( PyObject *modules ); -// Export for many shared extensions, like '_json' -PyAPI_FUNC(PyObject*) _PyImport_GetModuleAttr(PyObject *, PyObject *); - -// Export for many shared extensions, like '_datetime' -PyAPI_FUNC(PyObject*) _PyImport_GetModuleAttrString(const char *, const char *); - struct _import_runtime_state { /* The builtin modules (defined in config.c). */ diff --git a/Include/internal/pycore_instruments.h b/Include/internal/pycore_instruments.h index 4e5b374968ea98..92d8f056f402fc 100644 --- a/Include/internal/pycore_instruments.h +++ b/Include/internal/pycore_instruments.h @@ -48,8 +48,8 @@ _Py_call_instrumentation_instruction( _Py_CODEUNIT * _Py_call_instrumentation_jump( - PyThreadState *tstate, int event, - _PyInterpreterFrame *frame, _Py_CODEUNIT *instr, _Py_CODEUNIT *target); + _Py_CODEUNIT *instr, PyThreadState *tstate, int event, + _PyInterpreterFrame *frame, _Py_CODEUNIT *src, _Py_CODEUNIT *dest); extern int _Py_call_instrumentation_arg(PyThreadState *tstate, int event, diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h index a3c14dceffd7a0..6f00eca8de05af 100644 --- a/Include/internal/pycore_interp.h +++ b/Include/internal/pycore_interp.h @@ -31,7 +31,7 @@ extern "C" { #include "pycore_list.h" // struct _Py_list_state #include "pycore_mimalloc.h" // struct _mimalloc_interp_state #include "pycore_object_state.h" // struct _py_object_state -#include "pycore_optimizer.h" // _PyOptimizerObject +#include "pycore_optimizer.h" // _PyExecutorObject #include "pycore_obmalloc.h" // struct _obmalloc_state #include "pycore_qsbr.h" // struct _qsbr_state #include "pycore_stackref.h" // Py_STACKREF_DEBUG @@ -262,7 +262,7 @@ struct _is { struct ast_state ast; struct types_state types; struct callable_cache callable_cache; - _PyOptimizerObject *optimizer; + bool jit; _PyExecutorObject *executor_list_head; size_t trace_run_counter; _rare_events rare_events; @@ -341,43 +341,6 @@ extern void _PyInterpreterState_SetWhence( extern const PyConfig* _PyInterpreterState_GetConfig(PyInterpreterState *interp); -// Get a copy of the current interpreter configuration. -// -// Return 0 on success. Raise an exception and return -1 on error. -// -// The caller must initialize 'config', using PyConfig_InitPythonConfig() -// for example. -// -// Python must be preinitialized to call this method. -// The caller must hold the GIL. -// -// Once done with the configuration, PyConfig_Clear() must be called to clear -// it. -// -// Export for '_testinternalcapi' shared extension. -PyAPI_FUNC(int) _PyInterpreterState_GetConfigCopy( - struct PyConfig *config); - -// Set the configuration of the current interpreter. -// -// This function should be called during or just after the Python -// initialization. -// -// Update the sys module with the new configuration. If the sys module was -// modified directly after the Python initialization, these changes are lost. -// -// Some configuration like faulthandler or warnoptions can be updated in the -// configuration, but don't reconfigure Python (don't enable/disable -// faulthandler and don't reconfigure warnings filters). -// -// Return 0 on success. 
Raise an exception and return -1 on error. -// -// The configuration should come from _PyInterpreterState_GetConfigCopy(). -// -// Export for '_testinternalcapi' shared extension. -PyAPI_FUNC(int) _PyInterpreterState_SetConfig( - const struct PyConfig *config); - /* Runtime Feature Flags diff --git a/Include/internal/pycore_list.h b/Include/internal/pycore_list.h index 836ff30abfcedb..5d817891408481 100644 --- a/Include/internal/pycore_list.h +++ b/Include/internal/pycore_list.h @@ -61,7 +61,7 @@ typedef struct { union _PyStackRef; -PyAPI_FUNC(PyObject *)_PyList_FromStackRefSteal(const union _PyStackRef *src, Py_ssize_t n); +PyAPI_FUNC(PyObject *)_PyList_FromStackRefStealOnSuccess(const union _PyStackRef *src, Py_ssize_t n); PyAPI_FUNC(PyObject *)_PyList_AsTupleAndClear(PyListObject *v); #ifdef __cplusplus diff --git a/Include/internal/pycore_long.h b/Include/internal/pycore_long.h index 8bead00e70640c..c52eb77692dd6a 100644 --- a/Include/internal/pycore_long.h +++ b/Include/internal/pycore_long.h @@ -159,13 +159,14 @@ PyAPI_FUNC(int) _PyLong_Size_t_Converter(PyObject *, void *); /* Long value tag bits: * 0-1: Sign bits value = (1-sign), ie. negative=2, positive=0, zero=1. - * 2: Reserved for immortality bit + * 2: Set to 1 for the small ints * 3+ Unsigned digit count */ #define SIGN_MASK 3 #define SIGN_ZERO 1 #define SIGN_NEGATIVE 2 #define NON_SIZE_BITS 3 +#define IMMORTALITY_BIT_MASK (1 << 2) /* The functions _PyLong_IsCompact and _PyLong_CompactValue are defined * in Include/cpython/longobject.h, since they need to be inline. @@ -196,7 +197,7 @@ PyAPI_FUNC(int) _PyLong_Size_t_Converter(PyObject *, void *); static inline int _PyLong_IsNonNegativeCompact(const PyLongObject* op) { assert(PyLong_Check(op)); - return op->long_value.lv_tag <= (1 << NON_SIZE_BITS); + return ((op->long_value.lv_tag & ~IMMORTALITY_BIT_MASK) <= (1 << NON_SIZE_BITS)); } @@ -298,7 +299,7 @@ _PyLong_FlipSign(PyLongObject *op) { .long_value = { \ .lv_tag = TAG_FROM_SIGN_AND_SIZE( \ (val) == 0 ? 0 : ((val) < 0 ? -1 : 1), \ - (val) == 0 ? 0 : 1), \ + (val) == 0 ? 0 : 1) | IMMORTALITY_BIT_MASK, \ { ((val) >= 0 ? (val) : -(val)) }, \ } \ } diff --git a/Include/internal/pycore_magic_number.h b/Include/internal/pycore_magic_number.h index ec3685d2034560..5a0b6dae8a5ad6 100644 --- a/Include/internal/pycore_magic_number.h +++ b/Include/internal/pycore_magic_number.h @@ -264,6 +264,10 @@ Known values: Python 3.14a2 3609 (Add LOAD_SMALL_INT and LOAD_CONST_IMMORTAL instructions, remove RETURN_CONST) Python 3.14a4 3610 (Add VALUE_WITH_FAKE_GLOBALS format to annotationlib) Python 3.14a4 3611 (Add NOT_TAKEN instruction) + Python 3.14a4 3612 (Add POP_ITER and INSTRUMENTED_POP_ITER) + Python 3.14a4 3613 (Add LOAD_CONST_MORTAL instruction) + Python 3.14a5 3614 (Add BINARY_OP_EXTEND) + Python 3.14a5 3615 (CALL_FUNCTION_EX always take a kwargs argument) Python 3.15 will start with 3650 @@ -276,7 +280,7 @@ PC/launcher.c must also be updated. */ -#define PYC_MAGIC_NUMBER 3611 +#define PYC_MAGIC_NUMBER 3615 /* This is equivalent to converting PYC_MAGIC_NUMBER to 2 bytes (little-endian) and then appending b'\r\n'. 
*/ #define PYC_MAGIC_NUMBER_TOKEN \ diff --git a/Include/internal/pycore_object.h b/Include/internal/pycore_object.h index d7d68f938a9f0a..0b1df7e68b8dfa 100644 --- a/Include/internal/pycore_object.h +++ b/Include/internal/pycore_object.h @@ -62,7 +62,7 @@ extern void _Py_ForgetReference(PyObject *); PyAPI_FUNC(int) _PyObject_IsFreed(PyObject *); /* We need to maintain an internal copy of Py{Var}Object_HEAD_INIT to avoid - designated initializer conflicts in C++20. If we use the deinition in + designated initializer conflicts in C++20. If we use the definition in object.h, we will be mixing designated and non-designated initializers in pycore objects which is forbiddent in C++20. However, if we then use designated initializers in object.h then Extensions without designated break. @@ -120,7 +120,7 @@ PyAPI_FUNC(void) _Py_NO_RETURN _Py_FatalRefcountErrorFunc( PyAPI_DATA(Py_ssize_t) _Py_RefTotal; extern void _Py_AddRefTotal(PyThreadState *, Py_ssize_t); -extern void _Py_IncRefTotal(PyThreadState *); +extern PyAPI_FUNC(void) _Py_IncRefTotal(PyThreadState *); extern void _Py_DecRefTotal(PyThreadState *); # define _Py_DEC_REFTOTAL(interp) \ @@ -299,12 +299,6 @@ Py_ssize_t _Py_ExplicitMergeRefcount(PyObject *op, Py_ssize_t extra); extern int _PyType_CheckConsistency(PyTypeObject *type); extern int _PyDict_CheckConsistency(PyObject *mp, int check_content); -/* Update the Python traceback of an object. This function must be called - when a memory block is reused from a free list. - - Internal function called by _Py_NewReference(). */ -extern int _PyTraceMalloc_TraceRef(PyObject *op, PyRefTracerEvent event, void*); - // Fast inlined version of PyType_HasFeature() static inline int _PyType_HasFeature(PyTypeObject *type, unsigned long feature) { @@ -342,20 +336,20 @@ _Py_THREAD_INCREF_OBJECT(PyObject *obj, Py_ssize_t unique_id) { _PyThreadStateImpl *tstate = (_PyThreadStateImpl *)_PyThreadState_GET(); - // Unsigned comparison so that `unique_id=-1`, which indicates that - // per-thread refcounting has been disabled on this object, is handled by - // the "else". - if ((size_t)unique_id < (size_t)tstate->refcounts.size) { + // The table index is `unique_id - 1` because 0 is not a valid unique id. + // Unsigned comparison so that `idx=-1` is handled by the "else". + size_t idx = (size_t)(unique_id - 1); + if (idx < (size_t)tstate->refcounts.size) { # ifdef Py_REF_DEBUG _Py_INCREF_IncRefTotal(); # endif _Py_INCREF_STAT_INC(); - tstate->refcounts.values[unique_id]++; + tstate->refcounts.values[idx]++; } else { // The slow path resizes the per-thread refcount array if necessary. - // It handles the unique_id=-1 case to keep the inlinable function smaller. - _PyObject_ThreadIncrefSlow(obj, unique_id); + // It handles the unique_id=0 case to keep the inlinable function smaller. + _PyObject_ThreadIncrefSlow(obj, idx); } } @@ -392,15 +386,15 @@ _Py_THREAD_DECREF_OBJECT(PyObject *obj, Py_ssize_t unique_id) { _PyThreadStateImpl *tstate = (_PyThreadStateImpl *)_PyThreadState_GET(); - // Unsigned comparison so that `unique_id=-1`, which indicates that - // per-thread refcounting has been disabled on this object, is handled by - // the "else". - if ((size_t)unique_id < (size_t)tstate->refcounts.size) { + // The table index is `unique_id - 1` because 0 is not a valid unique id. + // Unsigned comparison so that `idx=-1` is handled by the "else". 
+ size_t idx = (size_t)(unique_id - 1); + if (idx < (size_t)tstate->refcounts.size) { # ifdef Py_REF_DEBUG _Py_DECREF_DecRefTotal(); # endif _Py_DECREF_STAT_INC(); - tstate->refcounts.values[unique_id]--; + tstate->refcounts.values[idx]--; } else { // Directly decref the object if the id is not assigned or if diff --git a/Include/internal/pycore_opcode_metadata.h b/Include/internal/pycore_opcode_metadata.h index 5fb236836dccd9..377a885dbb8c34 100644 --- a/Include/internal/pycore_opcode_metadata.h +++ b/Include/internal/pycore_opcode_metadata.h @@ -43,6 +43,8 @@ int _PyOpcode_num_popped(int opcode, int oparg) { return 2; case BINARY_OP_ADD_UNICODE: return 2; + case BINARY_OP_EXTEND: + return 2; case BINARY_OP_INPLACE_ADD_UNICODE: return 2; case BINARY_OP_MULTIPLY_FLOAT: @@ -74,7 +76,7 @@ int _PyOpcode_num_popped(int opcode, int oparg) { case BUILD_SET: return oparg; case BUILD_SLICE: - return 2 + ((oparg == 3) ? 1 : 0); + return oparg; case BUILD_STRING: return oparg; case BUILD_TUPLE: @@ -98,7 +100,7 @@ int _PyOpcode_num_popped(int opcode, int oparg) { case CALL_BUILTIN_O: return 2 + oparg; case CALL_FUNCTION_EX: - return 3 + (oparg & 1); + return 4; case CALL_INTRINSIC_1: return 1; case CALL_INTRINSIC_2: @@ -245,6 +247,8 @@ int _PyOpcode_num_popped(int opcode, int oparg) { return 0; case INSTRUMENTED_NOT_TAKEN: return 0; + case INSTRUMENTED_POP_ITER: + return 1; case INSTRUMENTED_POP_JUMP_IF_FALSE: return 0; case INSTRUMENTED_POP_JUMP_IF_NONE: @@ -267,8 +271,12 @@ int _PyOpcode_num_popped(int opcode, int oparg) { return 0; case JUMP_BACKWARD: return 0; + case JUMP_BACKWARD_JIT: + return 0; case JUMP_BACKWARD_NO_INTERRUPT: return 0; + case JUMP_BACKWARD_NO_JIT: + return 0; case JUMP_FORWARD: return 0; case JUMP_IF_FALSE: @@ -319,6 +327,8 @@ int _PyOpcode_num_popped(int opcode, int oparg) { return 0; case LOAD_CONST_IMMORTAL: return 0; + case LOAD_CONST_MORTAL: + return 0; case LOAD_DEREF: return 0; case LOAD_FAST: @@ -375,6 +385,8 @@ int _PyOpcode_num_popped(int opcode, int oparg) { return 0; case POP_EXCEPT: return 1; + case POP_ITER: + return 1; case POP_JUMP_IF_FALSE: return 1; case POP_JUMP_IF_NONE: @@ -506,6 +518,8 @@ int _PyOpcode_num_pushed(int opcode, int oparg) { return 1; case BINARY_OP_ADD_UNICODE: return 1; + case BINARY_OP_EXTEND: + return 1; case BINARY_OP_INPLACE_ADD_UNICODE: return 0; case BINARY_OP_MULTIPLY_FLOAT: @@ -708,6 +722,8 @@ int _PyOpcode_num_pushed(int opcode, int oparg) { return 0; case INSTRUMENTED_NOT_TAKEN: return 0; + case INSTRUMENTED_POP_ITER: + return 0; case INSTRUMENTED_POP_JUMP_IF_FALSE: return 0; case INSTRUMENTED_POP_JUMP_IF_NONE: @@ -730,8 +746,12 @@ int _PyOpcode_num_pushed(int opcode, int oparg) { return 0; case JUMP_BACKWARD: return 0; + case JUMP_BACKWARD_JIT: + return 0; case JUMP_BACKWARD_NO_INTERRUPT: return 0; + case JUMP_BACKWARD_NO_JIT: + return 0; case JUMP_FORWARD: return 0; case JUMP_IF_FALSE: @@ -745,7 +765,7 @@ int _PyOpcode_num_pushed(int opcode, int oparg) { case LIST_EXTEND: return 1 + (oparg-1); case LOAD_ATTR: - return 1 + (oparg & 1); + return 1 + (oparg&1); case LOAD_ATTR_CLASS: return 1 + (oparg & 1); case LOAD_ATTR_CLASS_WITH_METACLASS_CHECK: @@ -782,6 +802,8 @@ int _PyOpcode_num_pushed(int opcode, int oparg) { return 1; case LOAD_CONST_IMMORTAL: return 1; + case LOAD_CONST_MORTAL: + return 1; case LOAD_DEREF: return 1; case LOAD_FAST: @@ -838,6 +860,8 @@ int _PyOpcode_num_pushed(int opcode, int oparg) { return 0; case POP_EXCEPT: return 0; + case POP_ITER: + return 0; case POP_JUMP_IF_FALSE: return 0; case POP_JUMP_IF_NONE: 
@@ -977,6 +1001,10 @@ int _PyOpcode_max_stack_effect(int opcode, int oparg, int *effect) { *effect = 0; return 0; } + case BINARY_OP_EXTEND: { + *effect = 0; + return 0; + } case BINARY_OP_INPLACE_ADD_UNICODE: { *effect = 0; return 0; @@ -1038,7 +1066,7 @@ int _PyOpcode_max_stack_effect(int opcode, int oparg, int *effect) { return 0; } case BUILD_SLICE: { - *effect = -1 - ((oparg == 3) ? 1 : 0); + *effect = 1 - oparg; return 0; } case BUILD_STRING: { @@ -1094,7 +1122,7 @@ int _PyOpcode_max_stack_effect(int opcode, int oparg, int *effect) { return 0; } case CALL_FUNCTION_EX: { - *effect = Py_MAX(0, -2 - (oparg & 1)); + *effect = 0; return 0; } case CALL_INTRINSIC_1: { @@ -1399,6 +1427,10 @@ int _PyOpcode_max_stack_effect(int opcode, int oparg, int *effect) { *effect = 0; return 0; } + case INSTRUMENTED_POP_ITER: { + *effect = -1; + return 0; + } case INSTRUMENTED_POP_JUMP_IF_FALSE: { *effect = 0; return 0; @@ -1443,10 +1475,18 @@ int _PyOpcode_max_stack_effect(int opcode, int oparg, int *effect) { *effect = 0; return 0; } + case JUMP_BACKWARD_JIT: { + *effect = 0; + return 0; + } case JUMP_BACKWARD_NO_INTERRUPT: { *effect = 0; return 0; } + case JUMP_BACKWARD_NO_JIT: { + *effect = 0; + return 0; + } case JUMP_FORWARD: { *effect = 0; return 0; @@ -1472,7 +1512,9 @@ int _PyOpcode_max_stack_effect(int opcode, int oparg, int *effect) { return 0; } case LOAD_ATTR: { - *effect = Py_MAX(1, (oparg & 1)); + int max_eff = Py_MAX(1, (oparg & 1)); + max_eff = Py_MAX(max_eff, (oparg&1)); + *effect = max_eff; return 0; } case LOAD_ATTR_CLASS: { @@ -1524,7 +1566,7 @@ int _PyOpcode_max_stack_effect(int opcode, int oparg, int *effect) { return 0; } case LOAD_ATTR_WITH_HINT: { - *effect = Py_MAX(0, (oparg & 1)); + *effect = Py_MAX(1, (oparg & 1)); return 0; } case LOAD_BUILD_CLASS: { @@ -1547,6 +1589,10 @@ int _PyOpcode_max_stack_effect(int opcode, int oparg, int *effect) { *effect = 1; return 0; } + case LOAD_CONST_MORTAL: { + *effect = 1; + return 0; + } case LOAD_DEREF: { *effect = 1; return 0; @@ -1659,6 +1705,10 @@ int _PyOpcode_max_stack_effect(int opcode, int oparg, int *effect) { *effect = -1; return 0; } + case POP_ITER: { + *effect = -1; + return 0; + } case POP_JUMP_IF_FALSE: { *effect = -1; return 0; @@ -1895,11 +1945,13 @@ enum InstructionFormat { INSTR_FMT_IBC = 2, INSTR_FMT_IBC00 = 3, INSTR_FMT_IBC000 = 4, - INSTR_FMT_IBC00000000 = 5, - INSTR_FMT_IX = 6, - INSTR_FMT_IXC = 7, - INSTR_FMT_IXC00 = 8, - INSTR_FMT_IXC000 = 9, + INSTR_FMT_IBC0000 = 5, + INSTR_FMT_IBC00000000 = 6, + INSTR_FMT_IX = 7, + INSTR_FMT_IXC = 8, + INSTR_FMT_IXC00 = 9, + INSTR_FMT_IXC000 = 10, + INSTR_FMT_IXC0000 = 11, }; #define IS_VALID_OPCODE(OP) \ @@ -1921,6 +1973,7 @@ enum InstructionFormat { #define HAS_PASSTHROUGH_FLAG (4096) #define HAS_OPARG_AND_1_FLAG (8192) #define HAS_ERROR_NO_POP_FLAG (16384) +#define HAS_NO_SAVE_IP_FLAG (32768) #define OPCODE_HAS_ARG(OP) (_PyOpcode_opcode_metadata[OP].flags & (HAS_ARG_FLAG)) #define OPCODE_HAS_CONST(OP) (_PyOpcode_opcode_metadata[OP].flags & (HAS_CONST_FLAG)) #define OPCODE_HAS_NAME(OP) (_PyOpcode_opcode_metadata[OP].flags & (HAS_NAME_FLAG)) @@ -1936,6 +1989,7 @@ enum InstructionFormat { #define OPCODE_HAS_PASSTHROUGH(OP) (_PyOpcode_opcode_metadata[OP].flags & (HAS_PASSTHROUGH_FLAG)) #define OPCODE_HAS_OPARG_AND_1(OP) (_PyOpcode_opcode_metadata[OP].flags & (HAS_OPARG_AND_1_FLAG)) #define OPCODE_HAS_ERROR_NO_POP(OP) (_PyOpcode_opcode_metadata[OP].flags & (HAS_ERROR_NO_POP_FLAG)) +#define OPCODE_HAS_NO_SAVE_IP(OP) (_PyOpcode_opcode_metadata[OP].flags & (HAS_NO_SAVE_IP_FLAG)) 
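
The per-opcode helpers and flag macros above (_PyOpcode_num_popped, _PyOpcode_num_pushed, and the OPCODE_HAS_* tests, now including OPCODE_HAS_NO_SAVE_IP) are how internal tooling reasons about an instruction without hand-decoding it. A rough illustration of how they compose follows; it is a sketch only, assuming a Py_BUILD_CORE translation unit with access to Include/internal, and the helper names net_stack_effect and describe_opcode are hypothetical, not part of this header.

/* Sketch: combine the metadata tables declared above to describe one
 * instruction.  Internal API only -- requires Py_BUILD_CORE and a unit
 * that provides the NEED_OPCODE_METADATA definitions. */
#include <stdio.h>
#include "Python.h"
#include "pycore_opcode_metadata.h"

/* Net stack effect = pushes - pops; both helpers take oparg because many
 * instructions (BUILD_LIST, CALL, BUILD_SLICE, ...) scale with it. */
static int
net_stack_effect(int opcode, int oparg)
{
    return _PyOpcode_num_pushed(opcode, oparg)
         - _PyOpcode_num_popped(opcode, oparg);
}

static void
describe_opcode(int opcode, int oparg)
{
    printf("%-30s oparg:%d net-effect:%+d no-save-ip:%d\n",
           _PyOpcode_OpName[opcode],
           OPCODE_HAS_ARG(opcode) ? oparg : 0,
           net_stack_effect(opcode, oparg),
           OPCODE_HAS_NO_SAVE_IP(opcode) ? 1 : 0);
}

For example, with the table changes above, the new POP_ITER reports a net effect of -1 (pops the iterator, pushes nothing), and BINARY_OP still nets -1 while now carrying five cache entries per _PyOpcode_Caches.
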
#define OPARG_FULL 0 #define OPARG_CACHE_1 1 @@ -1948,45 +2002,46 @@ enum InstructionFormat { struct opcode_metadata { uint8_t valid_entry; - int8_t instr_format; - int16_t flags; + uint8_t instr_format; + uint16_t flags; }; extern const struct opcode_metadata _PyOpcode_opcode_metadata[266]; #ifdef NEED_OPCODE_METADATA const struct opcode_metadata _PyOpcode_opcode_metadata[266] = { - [BINARY_OP] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [BINARY_OP_ADD_FLOAT] = { true, INSTR_FMT_IXC, HAS_EXIT_FLAG | HAS_ERROR_FLAG }, - [BINARY_OP_ADD_INT] = { true, INSTR_FMT_IXC, HAS_EXIT_FLAG | HAS_ERROR_FLAG }, - [BINARY_OP_ADD_UNICODE] = { true, INSTR_FMT_IXC, HAS_EXIT_FLAG | HAS_ERROR_FLAG }, - [BINARY_OP_INPLACE_ADD_UNICODE] = { true, INSTR_FMT_IXC, HAS_LOCAL_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG | HAS_ERROR_FLAG }, - [BINARY_OP_MULTIPLY_FLOAT] = { true, INSTR_FMT_IXC, HAS_EXIT_FLAG | HAS_ERROR_FLAG }, - [BINARY_OP_MULTIPLY_INT] = { true, INSTR_FMT_IXC, HAS_EXIT_FLAG | HAS_ERROR_FLAG }, - [BINARY_OP_SUBTRACT_FLOAT] = { true, INSTR_FMT_IXC, HAS_EXIT_FLAG | HAS_ERROR_FLAG }, - [BINARY_OP_SUBTRACT_INT] = { true, INSTR_FMT_IXC, HAS_EXIT_FLAG | HAS_ERROR_FLAG }, + [BINARY_OP] = { true, INSTR_FMT_IBC0000, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [BINARY_OP_ADD_FLOAT] = { true, INSTR_FMT_IXC0000, HAS_EXIT_FLAG | HAS_ERROR_FLAG }, + [BINARY_OP_ADD_INT] = { true, INSTR_FMT_IXC0000, HAS_EXIT_FLAG | HAS_ERROR_FLAG }, + [BINARY_OP_ADD_UNICODE] = { true, INSTR_FMT_IXC0000, HAS_EXIT_FLAG | HAS_ERROR_FLAG }, + [BINARY_OP_EXTEND] = { true, INSTR_FMT_IXC0000, HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, + [BINARY_OP_INPLACE_ADD_UNICODE] = { true, INSTR_FMT_IXC0000, HAS_LOCAL_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG | HAS_ERROR_FLAG }, + [BINARY_OP_MULTIPLY_FLOAT] = { true, INSTR_FMT_IXC0000, HAS_EXIT_FLAG | HAS_ERROR_FLAG }, + [BINARY_OP_MULTIPLY_INT] = { true, INSTR_FMT_IXC0000, HAS_EXIT_FLAG | HAS_ERROR_FLAG }, + [BINARY_OP_SUBTRACT_FLOAT] = { true, INSTR_FMT_IXC0000, HAS_EXIT_FLAG | HAS_ERROR_FLAG }, + [BINARY_OP_SUBTRACT_INT] = { true, INSTR_FMT_IXC0000, HAS_EXIT_FLAG | HAS_ERROR_FLAG }, [BINARY_SLICE] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [BINARY_SUBSCR] = { true, INSTR_FMT_IXC, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [BINARY_SUBSCR_DICT] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [BINARY_SUBSCR_GETITEM] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG }, [BINARY_SUBSCR_LIST_INT] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, - [BINARY_SUBSCR_STR_INT] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG }, - [BINARY_SUBSCR_TUPLE_INT] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG }, - [BUILD_LIST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG }, + [BINARY_SUBSCR_STR_INT] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, + [BINARY_SUBSCR_TUPLE_INT] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, + [BUILD_LIST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG }, [BUILD_MAP] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [BUILD_SET] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [BUILD_SLICE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG }, [BUILD_STRING] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG }, - [BUILD_TUPLE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG }, + [BUILD_TUPLE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG }, [CACHE] = { true, INSTR_FMT_IX, 0 
}, [CALL] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, [CALL_ALLOC_AND_ENTER_INIT] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, - [CALL_BOUND_METHOD_EXACT_ARGS] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG }, + [CALL_BOUND_METHOD_EXACT_ARGS] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG | HAS_ESCAPES_FLAG }, [CALL_BOUND_METHOD_GENERAL] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, [CALL_BUILTIN_CLASS] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [CALL_BUILTIN_FAST] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [CALL_BUILTIN_FAST_WITH_KEYWORDS] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [CALL_BUILTIN_O] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_EXIT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [CALL_FUNCTION_EX] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, + [CALL_FUNCTION_EX] = { true, INSTR_FMT_IX, HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, [CALL_INTRINSIC_1] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [CALL_INTRINSIC_2] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [CALL_ISINSTANCE] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, @@ -1995,7 +2050,7 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[266] = { [CALL_KW_NON_PY] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_EXIT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [CALL_KW_PY] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, [CALL_LEN] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, - [CALL_LIST_APPEND] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG }, + [CALL_LIST_APPEND] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [CALL_METHOD_DESCRIPTOR_FAST] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_EXIT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_EXIT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [CALL_METHOD_DESCRIPTOR_NOARGS] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_EXIT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, @@ -2005,7 +2060,7 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[266] = { [CALL_PY_GENERAL] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, [CALL_STR_1] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [CALL_TUPLE_1] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [CALL_TYPE_1] = { true, 
INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, + [CALL_TYPE_1] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, [CHECK_EG_MATCH] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [CHECK_EXC_MATCH] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [CLEANUP_THROW] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, @@ -2028,7 +2083,7 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[266] = { [DICT_MERGE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [DICT_UPDATE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [END_ASYNC_FOR] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, - [END_FOR] = { true, INSTR_FMT_IX, HAS_PURE_FLAG }, + [END_FOR] = { true, INSTR_FMT_IX, HAS_ESCAPES_FLAG | HAS_NO_SAVE_IP_FLAG }, [END_SEND] = { true, INSTR_FMT_IX, HAS_PURE_FLAG }, [ENTER_EXECUTOR] = { true, INSTR_FMT_IB, HAS_ARG_FLAG }, [EXIT_INIT_CHECK] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, @@ -2037,9 +2092,9 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[266] = { [FORMAT_WITH_SPEC] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [FOR_ITER] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, [FOR_ITER_GEN] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, - [FOR_ITER_LIST] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_EXIT_FLAG }, + [FOR_ITER_LIST] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_EXIT_FLAG | HAS_ESCAPES_FLAG }, [FOR_ITER_RANGE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_EXIT_FLAG | HAS_ERROR_FLAG }, - [FOR_ITER_TUPLE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_EXIT_FLAG }, + [FOR_ITER_TUPLE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_EXIT_FLAG | HAS_ESCAPES_FLAG }, [GET_AITER] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [GET_ANEXT] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, [GET_AWAITABLE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, @@ -2051,18 +2106,19 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[266] = { [INSTRUMENTED_CALL] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, [INSTRUMENTED_CALL_FUNCTION_EX] = { true, INSTR_FMT_IX, 0 }, [INSTRUMENTED_CALL_KW] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [INSTRUMENTED_END_FOR] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, + [INSTRUMENTED_END_FOR] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG | HAS_NO_SAVE_IP_FLAG }, [INSTRUMENTED_END_SEND] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, - [INSTRUMENTED_FOR_ITER] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, + [INSTRUMENTED_FOR_ITER] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, [INSTRUMENTED_INSTRUCTION] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [INSTRUMENTED_JUMP_BACKWARD] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [INSTRUMENTED_JUMP_FORWARD] = { true, INSTR_FMT_IB, HAS_ARG_FLAG }, 
[INSTRUMENTED_LINE] = { true, INSTR_FMT_IX, HAS_ESCAPES_FLAG }, [INSTRUMENTED_LOAD_SUPER_ATTR] = { true, INSTR_FMT_IXC, 0 }, [INSTRUMENTED_NOT_TAKEN] = { true, INSTR_FMT_IX, 0 }, + [INSTRUMENTED_POP_ITER] = { true, INSTR_FMT_IX, HAS_ESCAPES_FLAG }, [INSTRUMENTED_POP_JUMP_IF_FALSE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG }, - [INSTRUMENTED_POP_JUMP_IF_NONE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG }, - [INSTRUMENTED_POP_JUMP_IF_NOT_NONE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG }, + [INSTRUMENTED_POP_JUMP_IF_NONE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_ESCAPES_FLAG }, + [INSTRUMENTED_POP_JUMP_IF_NOT_NONE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_ESCAPES_FLAG }, [INSTRUMENTED_POP_JUMP_IF_TRUE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG }, [INSTRUMENTED_RESUME] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, [INSTRUMENTED_RETURN_VALUE] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, @@ -2070,7 +2126,9 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[266] = { [INTERPRETER_EXIT] = { true, INSTR_FMT_IX, 0 }, [IS_OP] = { true, INSTR_FMT_IB, HAS_ARG_FLAG }, [JUMP_BACKWARD] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [JUMP_BACKWARD_JIT] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [JUMP_BACKWARD_NO_INTERRUPT] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_JUMP_FLAG }, + [JUMP_BACKWARD_NO_JIT] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [JUMP_FORWARD] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_JUMP_FLAG }, [LIST_APPEND] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG }, [LIST_EXTEND] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, @@ -2078,11 +2136,11 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[266] = { [LOAD_ATTR_CLASS] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_EXIT_FLAG }, [LOAD_ATTR_CLASS_WITH_METACLASS_CHECK] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_EXIT_FLAG }, [LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG }, - [LOAD_ATTR_INSTANCE_VALUE] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG }, + [LOAD_ATTR_INSTANCE_VALUE] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG | HAS_ESCAPES_FLAG }, [LOAD_ATTR_METHOD_LAZY_DICT] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG }, [LOAD_ATTR_METHOD_NO_DICT] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_EXIT_FLAG }, [LOAD_ATTR_METHOD_WITH_VALUES] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG }, - [LOAD_ATTR_MODULE] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, + [LOAD_ATTR_MODULE] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, [LOAD_ATTR_NONDESCRIPTOR_NO_DICT] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_EXIT_FLAG }, [LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG }, [LOAD_ATTR_PROPERTY] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG }, @@ -2090,8 +2148,9 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[266] = { [LOAD_ATTR_WITH_HINT] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG }, 
[LOAD_BUILD_CLASS] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [LOAD_COMMON_CONSTANT] = { true, INSTR_FMT_IB, HAS_ARG_FLAG }, - [LOAD_CONST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_CONST_FLAG | HAS_PURE_FLAG }, + [LOAD_CONST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_CONST_FLAG }, [LOAD_CONST_IMMORTAL] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_CONST_FLAG }, + [LOAD_CONST_MORTAL] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_CONST_FLAG }, [LOAD_DEREF] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_FREE_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [LOAD_FAST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_LOCAL_FLAG | HAS_PURE_FLAG }, [LOAD_FAST_AND_CLEAR] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_LOCAL_FLAG }, @@ -2108,8 +2167,8 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[266] = { [LOAD_SPECIAL] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [LOAD_SUPER_ATTR] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [LOAD_SUPER_ATTR_ATTR] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [LOAD_SUPER_ATTR_METHOD] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [MAKE_CELL] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_FREE_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG }, + [LOAD_SUPER_ATTR_METHOD] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, + [MAKE_CELL] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_FREE_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, [MAKE_FUNCTION] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [MAP_ADD] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [MATCH_CLASS] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, @@ -2119,6 +2178,7 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[266] = { [NOP] = { true, INSTR_FMT_IX, HAS_PURE_FLAG }, [NOT_TAKEN] = { true, INSTR_FMT_IX, HAS_PURE_FLAG }, [POP_EXCEPT] = { true, INSTR_FMT_IX, HAS_ESCAPES_FLAG }, + [POP_ITER] = { true, INSTR_FMT_IX, HAS_PURE_FLAG }, [POP_JUMP_IF_FALSE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG }, [POP_JUMP_IF_NONE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG }, [POP_JUMP_IF_NOT_NONE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG }, @@ -2140,19 +2200,19 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[266] = { [SET_FUNCTION_ATTRIBUTE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG }, [SET_UPDATE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [STORE_ATTR] = { true, INSTR_FMT_IBC000, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [STORE_ATTR_INSTANCE_VALUE] = { true, INSTR_FMT_IXC000, HAS_EXIT_FLAG }, - [STORE_ATTR_SLOT] = { true, INSTR_FMT_IXC000, HAS_DEOPT_FLAG | HAS_EXIT_FLAG }, + [STORE_ATTR_INSTANCE_VALUE] = { true, INSTR_FMT_IXC000, HAS_EXIT_FLAG | HAS_ESCAPES_FLAG }, + [STORE_ATTR_SLOT] = { true, INSTR_FMT_IXC000, HAS_DEOPT_FLAG | HAS_EXIT_FLAG | HAS_ESCAPES_FLAG }, [STORE_ATTR_WITH_HINT] = { true, INSTR_FMT_IBC000, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG | HAS_ESCAPES_FLAG }, [STORE_DEREF] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_FREE_FLAG | HAS_ESCAPES_FLAG }, - [STORE_FAST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_LOCAL_FLAG }, - [STORE_FAST_LOAD_FAST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_LOCAL_FLAG }, 
- [STORE_FAST_STORE_FAST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_LOCAL_FLAG }, + [STORE_FAST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_LOCAL_FLAG | HAS_ESCAPES_FLAG }, + [STORE_FAST_LOAD_FAST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_LOCAL_FLAG | HAS_ESCAPES_FLAG }, + [STORE_FAST_STORE_FAST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_LOCAL_FLAG | HAS_ESCAPES_FLAG }, [STORE_GLOBAL] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [STORE_NAME] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [STORE_SLICE] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [STORE_SUBSCR] = { true, INSTR_FMT_IXC, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [STORE_SUBSCR_DICT] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [STORE_SUBSCR_LIST_INT] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG }, + [STORE_SUBSCR_LIST_INT] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, [SWAP] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_PURE_FLAG }, [TO_BOOL] = { true, INSTR_FMT_IXC00, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [TO_BOOL_ALWAYS_TRUE] = { true, INSTR_FMT_IXC00, HAS_EXIT_FLAG }, @@ -2180,7 +2240,7 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[266] = { [SETUP_CLEANUP] = { true, -1, HAS_PURE_FLAG | HAS_ARG_FLAG }, [SETUP_FINALLY] = { true, -1, HAS_PURE_FLAG | HAS_ARG_FLAG }, [SETUP_WITH] = { true, -1, HAS_PURE_FLAG | HAS_ARG_FLAG }, - [STORE_FAST_MAYBE_NULL] = { true, -1, HAS_ARG_FLAG | HAS_LOCAL_FLAG }, + [STORE_FAST_MAYBE_NULL] = { true, -1, HAS_ARG_FLAG | HAS_LOCAL_FLAG | HAS_ESCAPES_FLAG }, }; #endif @@ -2198,6 +2258,7 @@ _PyOpcode_macro_expansion[256] = { [BINARY_OP_ADD_FLOAT] = { .nuops = 2, .uops = { { _GUARD_BOTH_FLOAT, 0, 0 }, { _BINARY_OP_ADD_FLOAT, 0, 0 } } }, [BINARY_OP_ADD_INT] = { .nuops = 2, .uops = { { _GUARD_BOTH_INT, 0, 0 }, { _BINARY_OP_ADD_INT, 0, 0 } } }, [BINARY_OP_ADD_UNICODE] = { .nuops = 2, .uops = { { _GUARD_BOTH_UNICODE, 0, 0 }, { _BINARY_OP_ADD_UNICODE, 0, 0 } } }, + [BINARY_OP_EXTEND] = { .nuops = 2, .uops = { { _GUARD_BINARY_OP_EXTEND, 4, 1 }, { _BINARY_OP_EXTEND, 4, 1 } } }, [BINARY_OP_INPLACE_ADD_UNICODE] = { .nuops = 2, .uops = { { _GUARD_BOTH_UNICODE, 0, 0 }, { _BINARY_OP_INPLACE_ADD_UNICODE, 0, 0 } } }, [BINARY_OP_MULTIPLY_FLOAT] = { .nuops = 2, .uops = { { _GUARD_BOTH_FLOAT, 0, 0 }, { _BINARY_OP_MULTIPLY_FLOAT, 0, 0 } } }, [BINARY_OP_MULTIPLY_INT] = { .nuops = 2, .uops = { { _GUARD_BOTH_INT, 0, 0 }, { _BINARY_OP_MULTIPLY_INT, 0, 0 } } }, @@ -2261,7 +2322,7 @@ _PyOpcode_macro_expansion[256] = { [DELETE_SUBSCR] = { .nuops = 1, .uops = { { _DELETE_SUBSCR, 0, 0 } } }, [DICT_MERGE] = { .nuops = 1, .uops = { { _DICT_MERGE, 0, 0 } } }, [DICT_UPDATE] = { .nuops = 1, .uops = { { _DICT_UPDATE, 0, 0 } } }, - [END_FOR] = { .nuops = 1, .uops = { { _POP_TOP, 0, 0 } } }, + [END_FOR] = { .nuops = 1, .uops = { { _END_FOR, 0, 0 } } }, [END_SEND] = { .nuops = 1, .uops = { { _END_SEND, 0, 0 } } }, [EXIT_INIT_CHECK] = { .nuops = 1, .uops = { { _EXIT_INIT_CHECK, 0, 0 } } }, [FORMAT_SIMPLE] = { .nuops = 1, .uops = { { _FORMAT_SIMPLE, 0, 0 } } }, @@ -2283,31 +2344,31 @@ _PyOpcode_macro_expansion[256] = { [LIST_APPEND] = { .nuops = 1, .uops = { { _LIST_APPEND, 0, 0 } } }, [LIST_EXTEND] = { .nuops = 1, .uops = { { _LIST_EXTEND, 0, 0 } } }, [LOAD_ATTR] = { .nuops = 1, .uops = { { _LOAD_ATTR, 0, 0 } } }, - [LOAD_ATTR_CLASS] = { .nuops = 2, .uops = { { _CHECK_ATTR_CLASS, 2, 1 }, { _LOAD_ATTR_CLASS, 4, 5 } } }, - [LOAD_ATTR_CLASS_WITH_METACLASS_CHECK] = { .nuops = 3, 
.uops = { { _CHECK_ATTR_CLASS, 2, 1 }, { _GUARD_TYPE_VERSION, 2, 3 }, { _LOAD_ATTR_CLASS, 4, 5 } } }, - [LOAD_ATTR_INSTANCE_VALUE] = { .nuops = 3, .uops = { { _GUARD_TYPE_VERSION, 2, 1 }, { _CHECK_MANAGED_OBJECT_HAS_VALUES, 0, 0 }, { _LOAD_ATTR_INSTANCE_VALUE, 1, 3 } } }, + [LOAD_ATTR_CLASS] = { .nuops = 3, .uops = { { _CHECK_ATTR_CLASS, 2, 1 }, { _LOAD_ATTR_CLASS, 4, 5 }, { _PUSH_NULL_CONDITIONAL, 0, 0 } } }, + [LOAD_ATTR_CLASS_WITH_METACLASS_CHECK] = { .nuops = 4, .uops = { { _CHECK_ATTR_CLASS, 2, 1 }, { _GUARD_TYPE_VERSION, 2, 3 }, { _LOAD_ATTR_CLASS, 4, 5 }, { _PUSH_NULL_CONDITIONAL, 0, 0 } } }, + [LOAD_ATTR_INSTANCE_VALUE] = { .nuops = 4, .uops = { { _GUARD_TYPE_VERSION, 2, 1 }, { _CHECK_MANAGED_OBJECT_HAS_VALUES, 0, 0 }, { _LOAD_ATTR_INSTANCE_VALUE, 1, 3 }, { _PUSH_NULL_CONDITIONAL, 0, 0 } } }, [LOAD_ATTR_METHOD_LAZY_DICT] = { .nuops = 3, .uops = { { _GUARD_TYPE_VERSION, 2, 1 }, { _CHECK_ATTR_METHOD_LAZY_DICT, 1, 3 }, { _LOAD_ATTR_METHOD_LAZY_DICT, 4, 5 } } }, [LOAD_ATTR_METHOD_NO_DICT] = { .nuops = 2, .uops = { { _GUARD_TYPE_VERSION, 2, 1 }, { _LOAD_ATTR_METHOD_NO_DICT, 4, 5 } } }, [LOAD_ATTR_METHOD_WITH_VALUES] = { .nuops = 4, .uops = { { _GUARD_TYPE_VERSION, 2, 1 }, { _GUARD_DORV_VALUES_INST_ATTR_FROM_DICT, 0, 0 }, { _GUARD_KEYS_VERSION, 2, 3 }, { _LOAD_ATTR_METHOD_WITH_VALUES, 4, 5 } } }, - [LOAD_ATTR_MODULE] = { .nuops = 2, .uops = { { _CHECK_ATTR_MODULE_PUSH_KEYS, 2, 1 }, { _LOAD_ATTR_MODULE_FROM_KEYS, 1, 3 } } }, + [LOAD_ATTR_MODULE] = { .nuops = 3, .uops = { { _CHECK_ATTR_MODULE_PUSH_KEYS, 2, 1 }, { _LOAD_ATTR_MODULE_FROM_KEYS, 1, 3 }, { _PUSH_NULL_CONDITIONAL, 0, 0 } } }, [LOAD_ATTR_NONDESCRIPTOR_NO_DICT] = { .nuops = 2, .uops = { { _GUARD_TYPE_VERSION, 2, 1 }, { _LOAD_ATTR_NONDESCRIPTOR_NO_DICT, 4, 5 } } }, [LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES] = { .nuops = 4, .uops = { { _GUARD_TYPE_VERSION, 2, 1 }, { _GUARD_DORV_VALUES_INST_ATTR_FROM_DICT, 0, 0 }, { _GUARD_KEYS_VERSION, 2, 3 }, { _LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES, 4, 5 } } }, [LOAD_ATTR_PROPERTY] = { .nuops = 5, .uops = { { _CHECK_PEP_523, 0, 0 }, { _GUARD_TYPE_VERSION, 2, 1 }, { _LOAD_ATTR_PROPERTY_FRAME, 4, 5 }, { _SAVE_RETURN_OFFSET, 7, 9 }, { _PUSH_FRAME, 0, 0 } } }, - [LOAD_ATTR_SLOT] = { .nuops = 2, .uops = { { _GUARD_TYPE_VERSION, 2, 1 }, { _LOAD_ATTR_SLOT, 1, 3 } } }, - [LOAD_ATTR_WITH_HINT] = { .nuops = 3, .uops = { { _GUARD_TYPE_VERSION, 2, 1 }, { _CHECK_ATTR_WITH_HINT, 0, 0 }, { _LOAD_ATTR_WITH_HINT, 1, 3 } } }, + [LOAD_ATTR_SLOT] = { .nuops = 3, .uops = { { _GUARD_TYPE_VERSION, 2, 1 }, { _LOAD_ATTR_SLOT, 1, 3 }, { _PUSH_NULL_CONDITIONAL, 0, 0 } } }, + [LOAD_ATTR_WITH_HINT] = { .nuops = 4, .uops = { { _GUARD_TYPE_VERSION, 2, 1 }, { _CHECK_ATTR_WITH_HINT, 0, 0 }, { _LOAD_ATTR_WITH_HINT, 1, 3 }, { _PUSH_NULL_CONDITIONAL, 0, 0 } } }, [LOAD_BUILD_CLASS] = { .nuops = 1, .uops = { { _LOAD_BUILD_CLASS, 0, 0 } } }, [LOAD_COMMON_CONSTANT] = { .nuops = 1, .uops = { { _LOAD_COMMON_CONSTANT, 0, 0 } } }, - [LOAD_CONST] = { .nuops = 1, .uops = { { _LOAD_CONST, 0, 0 } } }, [LOAD_CONST_IMMORTAL] = { .nuops = 1, .uops = { { _LOAD_CONST_IMMORTAL, 0, 0 } } }, + [LOAD_CONST_MORTAL] = { .nuops = 1, .uops = { { _LOAD_CONST_MORTAL, 0, 0 } } }, [LOAD_DEREF] = { .nuops = 1, .uops = { { _LOAD_DEREF, 0, 0 } } }, [LOAD_FAST] = { .nuops = 1, .uops = { { _LOAD_FAST, 0, 0 } } }, [LOAD_FAST_AND_CLEAR] = { .nuops = 1, .uops = { { _LOAD_FAST_AND_CLEAR, 0, 0 } } }, [LOAD_FAST_CHECK] = { .nuops = 1, .uops = { { _LOAD_FAST_CHECK, 0, 0 } } }, [LOAD_FAST_LOAD_FAST] = { .nuops = 2, .uops = { { _LOAD_FAST, 5, 0 }, { _LOAD_FAST, 6, 0 } } }, 
[LOAD_FROM_DICT_OR_DEREF] = { .nuops = 1, .uops = { { _LOAD_FROM_DICT_OR_DEREF, 0, 0 } } }, - [LOAD_GLOBAL] = { .nuops = 1, .uops = { { _LOAD_GLOBAL, 0, 0 } } }, - [LOAD_GLOBAL_BUILTIN] = { .nuops = 3, .uops = { { _GUARD_GLOBALS_VERSION, 1, 1 }, { _GUARD_BUILTINS_VERSION_PUSH_KEYS, 1, 2 }, { _LOAD_GLOBAL_BUILTINS_FROM_KEYS, 1, 3 } } }, - [LOAD_GLOBAL_MODULE] = { .nuops = 2, .uops = { { _GUARD_GLOBALS_VERSION_PUSH_KEYS, 1, 1 }, { _LOAD_GLOBAL_MODULE_FROM_KEYS, 1, 3 } } }, + [LOAD_GLOBAL] = { .nuops = 2, .uops = { { _LOAD_GLOBAL, 0, 0 }, { _PUSH_NULL_CONDITIONAL, 0, 0 } } }, + [LOAD_GLOBAL_BUILTIN] = { .nuops = 4, .uops = { { _GUARD_GLOBALS_VERSION, 1, 1 }, { _GUARD_BUILTINS_VERSION_PUSH_KEYS, 1, 2 }, { _LOAD_GLOBAL_BUILTINS_FROM_KEYS, 1, 3 }, { _PUSH_NULL_CONDITIONAL, 0, 0 } } }, + [LOAD_GLOBAL_MODULE] = { .nuops = 3, .uops = { { _GUARD_GLOBALS_VERSION_PUSH_KEYS, 1, 1 }, { _LOAD_GLOBAL_MODULE_FROM_KEYS, 1, 3 }, { _PUSH_NULL_CONDITIONAL, 0, 0 } } }, [LOAD_LOCALS] = { .nuops = 1, .uops = { { _LOAD_LOCALS, 0, 0 } } }, [LOAD_NAME] = { .nuops = 1, .uops = { { _LOAD_NAME, 0, 0 } } }, [LOAD_SMALL_INT] = { .nuops = 1, .uops = { { _LOAD_SMALL_INT, 0, 0 } } }, @@ -2324,6 +2385,7 @@ _PyOpcode_macro_expansion[256] = { [NOP] = { .nuops = 1, .uops = { { _NOP, 0, 0 } } }, [NOT_TAKEN] = { .nuops = 1, .uops = { { _NOP, 0, 0 } } }, [POP_EXCEPT] = { .nuops = 1, .uops = { { _POP_EXCEPT, 0, 0 } } }, + [POP_ITER] = { .nuops = 1, .uops = { { _POP_TOP, 0, 0 } } }, [POP_JUMP_IF_FALSE] = { .nuops = 1, .uops = { { _POP_JUMP_IF_FALSE, 9, 1 } } }, [POP_JUMP_IF_NONE] = { .nuops = 2, .uops = { { _IS_NONE, 0, 0 }, { _POP_JUMP_IF_TRUE, 9, 1 } } }, [POP_JUMP_IF_NOT_NONE] = { .nuops = 2, .uops = { { _IS_NONE, 0, 0 }, { _POP_JUMP_IF_FALSE, 9, 1 } } }, @@ -2381,6 +2443,7 @@ const char *_PyOpcode_OpName[266] = { [BINARY_OP_ADD_FLOAT] = "BINARY_OP_ADD_FLOAT", [BINARY_OP_ADD_INT] = "BINARY_OP_ADD_INT", [BINARY_OP_ADD_UNICODE] = "BINARY_OP_ADD_UNICODE", + [BINARY_OP_EXTEND] = "BINARY_OP_EXTEND", [BINARY_OP_INPLACE_ADD_UNICODE] = "BINARY_OP_INPLACE_ADD_UNICODE", [BINARY_OP_MULTIPLY_FLOAT] = "BINARY_OP_MULTIPLY_FLOAT", [BINARY_OP_MULTIPLY_INT] = "BINARY_OP_MULTIPLY_INT", @@ -2482,6 +2545,7 @@ const char *_PyOpcode_OpName[266] = { [INSTRUMENTED_LINE] = "INSTRUMENTED_LINE", [INSTRUMENTED_LOAD_SUPER_ATTR] = "INSTRUMENTED_LOAD_SUPER_ATTR", [INSTRUMENTED_NOT_TAKEN] = "INSTRUMENTED_NOT_TAKEN", + [INSTRUMENTED_POP_ITER] = "INSTRUMENTED_POP_ITER", [INSTRUMENTED_POP_JUMP_IF_FALSE] = "INSTRUMENTED_POP_JUMP_IF_FALSE", [INSTRUMENTED_POP_JUMP_IF_NONE] = "INSTRUMENTED_POP_JUMP_IF_NONE", [INSTRUMENTED_POP_JUMP_IF_NOT_NONE] = "INSTRUMENTED_POP_JUMP_IF_NOT_NONE", @@ -2493,7 +2557,9 @@ const char *_PyOpcode_OpName[266] = { [IS_OP] = "IS_OP", [JUMP] = "JUMP", [JUMP_BACKWARD] = "JUMP_BACKWARD", + [JUMP_BACKWARD_JIT] = "JUMP_BACKWARD_JIT", [JUMP_BACKWARD_NO_INTERRUPT] = "JUMP_BACKWARD_NO_INTERRUPT", + [JUMP_BACKWARD_NO_JIT] = "JUMP_BACKWARD_NO_JIT", [JUMP_FORWARD] = "JUMP_FORWARD", [JUMP_IF_FALSE] = "JUMP_IF_FALSE", [JUMP_IF_TRUE] = "JUMP_IF_TRUE", @@ -2519,6 +2585,7 @@ const char *_PyOpcode_OpName[266] = { [LOAD_COMMON_CONSTANT] = "LOAD_COMMON_CONSTANT", [LOAD_CONST] = "LOAD_CONST", [LOAD_CONST_IMMORTAL] = "LOAD_CONST_IMMORTAL", + [LOAD_CONST_MORTAL] = "LOAD_CONST_MORTAL", [LOAD_DEREF] = "LOAD_DEREF", [LOAD_FAST] = "LOAD_FAST", [LOAD_FAST_AND_CLEAR] = "LOAD_FAST_AND_CLEAR", @@ -2547,6 +2614,7 @@ const char *_PyOpcode_OpName[266] = { [NOT_TAKEN] = "NOT_TAKEN", [POP_BLOCK] = "POP_BLOCK", [POP_EXCEPT] = "POP_EXCEPT", + [POP_ITER] = "POP_ITER", 
[POP_JUMP_IF_FALSE] = "POP_JUMP_IF_FALSE", [POP_JUMP_IF_NONE] = "POP_JUMP_IF_NONE", [POP_JUMP_IF_NOT_NONE] = "POP_JUMP_IF_NOT_NONE", @@ -2628,7 +2696,7 @@ const uint8_t _PyOpcode_Caches[256] = { [FOR_ITER] = 1, [CALL] = 3, [CALL_KW] = 3, - [BINARY_OP] = 1, + [BINARY_OP] = 5, }; #endif @@ -2639,6 +2707,7 @@ const uint8_t _PyOpcode_Deopt[256] = { [BINARY_OP_ADD_FLOAT] = BINARY_OP, [BINARY_OP_ADD_INT] = BINARY_OP, [BINARY_OP_ADD_UNICODE] = BINARY_OP, + [BINARY_OP_EXTEND] = BINARY_OP, [BINARY_OP_INPLACE_ADD_UNICODE] = BINARY_OP, [BINARY_OP_MULTIPLY_FLOAT] = BINARY_OP, [BINARY_OP_MULTIPLY_INT] = BINARY_OP, @@ -2740,6 +2809,7 @@ const uint8_t _PyOpcode_Deopt[256] = { [INSTRUMENTED_LINE] = INSTRUMENTED_LINE, [INSTRUMENTED_LOAD_SUPER_ATTR] = INSTRUMENTED_LOAD_SUPER_ATTR, [INSTRUMENTED_NOT_TAKEN] = INSTRUMENTED_NOT_TAKEN, + [INSTRUMENTED_POP_ITER] = INSTRUMENTED_POP_ITER, [INSTRUMENTED_POP_JUMP_IF_FALSE] = INSTRUMENTED_POP_JUMP_IF_FALSE, [INSTRUMENTED_POP_JUMP_IF_NONE] = INSTRUMENTED_POP_JUMP_IF_NONE, [INSTRUMENTED_POP_JUMP_IF_NOT_NONE] = INSTRUMENTED_POP_JUMP_IF_NOT_NONE, @@ -2750,7 +2820,9 @@ const uint8_t _PyOpcode_Deopt[256] = { [INTERPRETER_EXIT] = INTERPRETER_EXIT, [IS_OP] = IS_OP, [JUMP_BACKWARD] = JUMP_BACKWARD, + [JUMP_BACKWARD_JIT] = JUMP_BACKWARD, [JUMP_BACKWARD_NO_INTERRUPT] = JUMP_BACKWARD_NO_INTERRUPT, + [JUMP_BACKWARD_NO_JIT] = JUMP_BACKWARD, [JUMP_FORWARD] = JUMP_FORWARD, [LIST_APPEND] = LIST_APPEND, [LIST_EXTEND] = LIST_EXTEND, @@ -2772,6 +2844,7 @@ const uint8_t _PyOpcode_Deopt[256] = { [LOAD_COMMON_CONSTANT] = LOAD_COMMON_CONSTANT, [LOAD_CONST] = LOAD_CONST, [LOAD_CONST_IMMORTAL] = LOAD_CONST, + [LOAD_CONST_MORTAL] = LOAD_CONST, [LOAD_DEREF] = LOAD_DEREF, [LOAD_FAST] = LOAD_FAST, [LOAD_FAST_AND_CLEAR] = LOAD_FAST_AND_CLEAR, @@ -2799,6 +2872,7 @@ const uint8_t _PyOpcode_Deopt[256] = { [NOP] = NOP, [NOT_TAKEN] = NOT_TAKEN, [POP_EXCEPT] = POP_EXCEPT, + [POP_ITER] = POP_ITER, [POP_JUMP_IF_FALSE] = POP_JUMP_IF_FALSE, [POP_JUMP_IF_NONE] = POP_JUMP_IF_NONE, [POP_JUMP_IF_NOT_NONE] = POP_JUMP_IF_NOT_NONE, @@ -2856,7 +2930,6 @@ const uint8_t _PyOpcode_Deopt[256] = { #endif // NEED_OPCODE_METADATA #define EXTRA_CASES \ - case 117: \ case 118: \ case 119: \ case 120: \ @@ -2888,14 +2961,9 @@ const uint8_t _PyOpcode_Deopt[256] = { case 146: \ case 147: \ case 148: \ - case 228: \ - case 229: \ - case 230: \ - case 231: \ case 232: \ case 233: \ case 234: \ - case 235: \ ; struct pseudo_targets { uint8_t as_sequence; diff --git a/Include/internal/pycore_optimizer.h b/Include/internal/pycore_optimizer.h index bc7cfcde613d65..e806e306d2d57f 100644 --- a/Include/internal/pycore_optimizer.h +++ b/Include/internal/pycore_optimizer.h @@ -83,28 +83,6 @@ typedef struct _PyExecutorObject { _PyExitData exits[1]; } _PyExecutorObject; -typedef struct _PyOptimizerObject _PyOptimizerObject; - -/* Should return > 0 if a new executor is created. O if no executor is produced and < 0 if an error occurred. 
*/ -typedef int (*_Py_optimize_func)( - _PyOptimizerObject* self, struct _PyInterpreterFrame *frame, - _Py_CODEUNIT *instr, _PyExecutorObject **exec_ptr, - int curr_stackentries, bool progress_needed); - -struct _PyOptimizerObject { - PyObject_HEAD - _Py_optimize_func optimize; - /* Data needed by the optimizer goes here, but is opaque to the VM */ -}; - -/** Test support **/ -typedef struct { - _PyOptimizerObject base; - int64_t count; -} _PyCounterOptimizerObject; - -_PyOptimizerObject *_Py_SetOptimizer(PyInterpreterState *interp, _PyOptimizerObject* optimizer); - // Export for '_opcode' shared extension (JIT compiler). PyAPI_FUNC(_PyExecutorObject*) _Py_GetExecutor(PyCodeObject *code, int offset); @@ -115,13 +93,6 @@ void _Py_BloomFilter_Init(_PyBloomFilter *); void _Py_BloomFilter_Add(_PyBloomFilter *bloom, void *obj); PyAPI_FUNC(void) _Py_Executor_DependsOn(_PyExecutorObject *executor, void *obj); -// For testing -// Export for '_testinternalcapi' shared extension. -PyAPI_FUNC(_PyOptimizerObject *) _Py_GetOptimizer(void); -PyAPI_FUNC(int) _Py_SetTier2Optimizer(_PyOptimizerObject* optimizer); -PyAPI_FUNC(PyObject *) _PyOptimizer_NewCounter(void); -PyAPI_FUNC(PyObject *) _PyOptimizer_NewUOpOptimizer(void); - #define _Py_MAX_ALLOWED_BUILTINS_MODIFICATIONS 3 #define _Py_MAX_ALLOWED_GLOBALS_MODIFICATIONS 6 @@ -150,21 +121,8 @@ int _Py_uop_analyze_and_optimize(struct _PyInterpreterFrame *frame, _PyUOpInstruction *trace, int trace_len, int curr_stackentries, _PyBloomFilter *dependencies); -extern PyTypeObject _PyCounterExecutor_Type; -extern PyTypeObject _PyCounterOptimizer_Type; -extern PyTypeObject _PyDefaultOptimizer_Type; extern PyTypeObject _PyUOpExecutor_Type; -extern PyTypeObject _PyUOpOptimizer_Type; -/* Symbols */ -/* See explanation in optimizer_symbols.c */ - -struct _Py_UopsSymbol { - int flags; // 0 bits: Top; 2 or more bits: Bottom - PyTypeObject *typ; // Borrowed reference - PyObject *const_val; // Owned reference (!) - unsigned int type_version; // currently stores type version -}; #define UOP_FORMAT_TARGET 0 #define UOP_FORMAT_JUMP 1 @@ -201,16 +159,63 @@ static inline uint16_t uop_get_error_target(const _PyUOpInstruction *inst) // handle before rejoining the rest of the program. 
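
The executor and bloom-filter declarations above (_Py_BloomFilter_Init, _Py_BloomFilter_Add, _Py_Executor_DependsOn) are the hooks a compiled trace uses to stay valid: each object the trace assumed stable is folded into the executor's bloom filter, so a later change to such an object can cheaply invalidate every executor that might depend on it. A minimal sketch of the intended call pattern follows; record_trace_dependencies is a hypothetical wrapper written only for illustration, again assuming a Py_BUILD_CORE context.

/* Sketch: record the objects a freshly built trace relies on.
 * Hypothetical helper; the two calls use the declarations above. */
#include "Python.h"
#include "pycore_optimizer.h"

static void
record_trace_dependencies(_PyExecutorObject *executor,
                          PyObject *globals, PyObject *builtins)
{
    /* Each dependency is hashed into the executor's bloom filter; an
     * invalidation pass later drops any executor whose filter may
     * contain an object that changed. */
    _Py_Executor_DependsOn(executor, globals);
    _Py_Executor_DependsOn(executor, builtins);
}
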
#define MAX_CHAIN_DEPTH 4 -typedef struct _Py_UopsSymbol _Py_UopsSymbol; +/* Symbols */ +/* See explanation in optimizer_symbols.c */ + + +typedef enum _JitSymType { + JIT_SYM_UNKNOWN_TAG = 1, + JIT_SYM_NULL_TAG = 2, + JIT_SYM_NON_NULL_TAG = 3, + JIT_SYM_BOTTOM_TAG = 4, + JIT_SYM_TYPE_VERSION_TAG = 5, + JIT_SYM_KNOWN_CLASS_TAG = 6, + JIT_SYM_KNOWN_VALUE_TAG = 7, + JIT_SYM_TUPLE_TAG = 8, +} JitSymType; + +typedef struct _jit_opt_known_class { + uint8_t tag; + uint32_t version; + PyTypeObject *type; +} JitOptKnownClass; + +typedef struct _jit_opt_known_version { + uint8_t tag; + uint32_t version; +} JitOptKnownVersion; + +typedef struct _jit_opt_known_value { + uint8_t tag; + PyObject *value; +} JitOptKnownValue; + +#define MAX_SYMBOLIC_TUPLE_SIZE 7 + +typedef struct _jit_opt_tuple { + uint8_t tag; + uint8_t length; + uint16_t items[MAX_SYMBOLIC_TUPLE_SIZE]; +} JitOptTuple; + +typedef union _jit_opt_symbol { + uint8_t tag; + JitOptKnownClass cls; + JitOptKnownValue value; + JitOptKnownVersion version; + JitOptTuple tuple; +} JitOptSymbol; + + struct _Py_UOpsAbstractFrame { // Max stacklen int stack_len; int locals_len; - _Py_UopsSymbol **stack_pointer; - _Py_UopsSymbol **stack; - _Py_UopsSymbol **locals; + JitOptSymbol **stack_pointer; + JitOptSymbol **stack; + JitOptSymbol **locals; }; typedef struct _Py_UOpsAbstractFrame _Py_UOpsAbstractFrame; @@ -218,10 +223,10 @@ typedef struct _Py_UOpsAbstractFrame _Py_UOpsAbstractFrame; typedef struct ty_arena { int ty_curr_number; int ty_max_number; - _Py_UopsSymbol arena[TY_ARENA_SIZE]; + JitOptSymbol arena[TY_ARENA_SIZE]; } ty_arena; -struct _Py_UOpsContext { +typedef struct _JitOptContext { char done; char out_of_space; bool contradiction; @@ -233,46 +238,47 @@ struct _Py_UOpsContext { // Arena for the symbolic types. 
ty_arena t_arena; - _Py_UopsSymbol **n_consumed; - _Py_UopsSymbol **limit; - _Py_UopsSymbol *locals_and_stack[MAX_ABSTRACT_INTERP_SIZE]; -}; - -typedef struct _Py_UOpsContext _Py_UOpsContext; - -extern bool _Py_uop_sym_is_null(_Py_UopsSymbol *sym); -extern bool _Py_uop_sym_is_not_null(_Py_UopsSymbol *sym); -extern bool _Py_uop_sym_is_const(_Py_UopsSymbol *sym); -extern PyObject *_Py_uop_sym_get_const(_Py_UopsSymbol *sym); -extern _Py_UopsSymbol *_Py_uop_sym_new_unknown(_Py_UOpsContext *ctx); -extern _Py_UopsSymbol *_Py_uop_sym_new_not_null(_Py_UOpsContext *ctx); -extern _Py_UopsSymbol *_Py_uop_sym_new_type( - _Py_UOpsContext *ctx, PyTypeObject *typ); -extern _Py_UopsSymbol *_Py_uop_sym_new_const(_Py_UOpsContext *ctx, PyObject *const_val); -extern _Py_UopsSymbol *_Py_uop_sym_new_null(_Py_UOpsContext *ctx); -extern bool _Py_uop_sym_has_type(_Py_UopsSymbol *sym); -extern bool _Py_uop_sym_matches_type(_Py_UopsSymbol *sym, PyTypeObject *typ); -extern bool _Py_uop_sym_matches_type_version(_Py_UopsSymbol *sym, unsigned int version); -extern void _Py_uop_sym_set_null(_Py_UOpsContext *ctx, _Py_UopsSymbol *sym); -extern void _Py_uop_sym_set_non_null(_Py_UOpsContext *ctx, _Py_UopsSymbol *sym); -extern void _Py_uop_sym_set_type(_Py_UOpsContext *ctx, _Py_UopsSymbol *sym, PyTypeObject *typ); -extern bool _Py_uop_sym_set_type_version(_Py_UOpsContext *ctx, _Py_UopsSymbol *sym, unsigned int version); -extern void _Py_uop_sym_set_const(_Py_UOpsContext *ctx, _Py_UopsSymbol *sym, PyObject *const_val); -extern bool _Py_uop_sym_is_bottom(_Py_UopsSymbol *sym); -extern int _Py_uop_sym_truthiness(_Py_UopsSymbol *sym); -extern PyTypeObject *_Py_uop_sym_get_type(_Py_UopsSymbol *sym); - - -extern void _Py_uop_abstractcontext_init(_Py_UOpsContext *ctx); -extern void _Py_uop_abstractcontext_fini(_Py_UOpsContext *ctx); + JitOptSymbol **n_consumed; + JitOptSymbol **limit; + JitOptSymbol *locals_and_stack[MAX_ABSTRACT_INTERP_SIZE]; +} JitOptContext; + +extern bool _Py_uop_sym_is_null(JitOptSymbol *sym); +extern bool _Py_uop_sym_is_not_null(JitOptSymbol *sym); +extern bool _Py_uop_sym_is_const(JitOptSymbol *sym); +extern PyObject *_Py_uop_sym_get_const(JitOptSymbol *sym); +extern JitOptSymbol *_Py_uop_sym_new_unknown(JitOptContext *ctx); +extern JitOptSymbol *_Py_uop_sym_new_not_null(JitOptContext *ctx); +extern JitOptSymbol *_Py_uop_sym_new_type( + JitOptContext *ctx, PyTypeObject *typ); +extern JitOptSymbol *_Py_uop_sym_new_const(JitOptContext *ctx, PyObject *const_val); +extern JitOptSymbol *_Py_uop_sym_new_null(JitOptContext *ctx); +extern bool _Py_uop_sym_has_type(JitOptSymbol *sym); +extern bool _Py_uop_sym_matches_type(JitOptSymbol *sym, PyTypeObject *typ); +extern bool _Py_uop_sym_matches_type_version(JitOptSymbol *sym, unsigned int version); +extern void _Py_uop_sym_set_null(JitOptContext *ctx, JitOptSymbol *sym); +extern void _Py_uop_sym_set_non_null(JitOptContext *ctx, JitOptSymbol *sym); +extern void _Py_uop_sym_set_type(JitOptContext *ctx, JitOptSymbol *sym, PyTypeObject *typ); +extern bool _Py_uop_sym_set_type_version(JitOptContext *ctx, JitOptSymbol *sym, unsigned int version); +extern void _Py_uop_sym_set_const(JitOptContext *ctx, JitOptSymbol *sym, PyObject *const_val); +extern bool _Py_uop_sym_is_bottom(JitOptSymbol *sym); +extern int _Py_uop_sym_truthiness(JitOptSymbol *sym); +extern PyTypeObject *_Py_uop_sym_get_type(JitOptSymbol *sym); +extern bool _Py_uop_sym_is_immortal(JitOptSymbol *sym); +extern JitOptSymbol *_Py_uop_sym_new_tuple(JitOptContext *ctx, int size, JitOptSymbol **args); +extern 
JitOptSymbol *_Py_uop_sym_tuple_getitem(JitOptContext *ctx, JitOptSymbol *sym, int item); +extern int _Py_uop_sym_tuple_length(JitOptSymbol *sym); + +extern void _Py_uop_abstractcontext_init(JitOptContext *ctx); +extern void _Py_uop_abstractcontext_fini(JitOptContext *ctx); extern _Py_UOpsAbstractFrame *_Py_uop_frame_new( - _Py_UOpsContext *ctx, + JitOptContext *ctx, PyCodeObject *co, int curr_stackentries, - _Py_UopsSymbol **args, + JitOptSymbol **args, int arg_len); -extern int _Py_uop_frame_pop(_Py_UOpsContext *ctx); +extern int _Py_uop_frame_pop(JitOptContext *ctx); PyAPI_FUNC(PyObject *) _Py_uop_symbols_test(PyObject *self, PyObject *ignored); diff --git a/Include/internal/pycore_pyerrors.h b/Include/internal/pycore_pyerrors.h index 6f2fdda9a9f12f..fa7d9ee36d095d 100644 --- a/Include/internal/pycore_pyerrors.h +++ b/Include/internal/pycore_pyerrors.h @@ -190,6 +190,15 @@ Py_DEPRECATED(3.12) extern void _PyErr_ChainExceptions(PyObject *, PyObject *, P PyAPI_DATA(PyTypeObject) _PyExc_IncompleteInputError; #define PyExc_IncompleteInputError ((PyObject *)(&_PyExc_IncompleteInputError)) +extern int _PyUnicodeError_GetParams( + PyObject *self, + PyObject **obj, + Py_ssize_t *objlen, + Py_ssize_t *start, + Py_ssize_t *end, + Py_ssize_t *slen, + int as_bytes); + #ifdef __cplusplus } #endif diff --git a/Include/internal/pycore_pystate.h b/Include/internal/pycore_pystate.h index 1e73e541ef8de0..ff3b222b157810 100644 --- a/Include/internal/pycore_pystate.h +++ b/Include/internal/pycore_pystate.h @@ -300,6 +300,19 @@ PyAPI_FUNC(const PyConfig*) _Py_GetConfig(void); // See also PyInterpreterState_Get() and _PyInterpreterState_GET(). extern PyInterpreterState* _PyGILState_GetInterpreterStateUnsafe(void); +#ifndef NDEBUG +/* Modern equivalent of assert(PyGILState_Check()) */ +static inline void +_Py_AssertHoldsTstateFunc(const char *func) +{ + PyThreadState *tstate = _PyThreadState_GET(); + _Py_EnsureFuncTstateNotNULL(func, tstate); +} +#define _Py_AssertHoldsTstate() _Py_AssertHoldsTstateFunc(__func__) +#else +#define _Py_AssertHoldsTstate() +#endif + #ifdef __cplusplus } #endif diff --git a/Include/internal/pycore_runtime.h b/Include/internal/pycore_runtime.h index 86d024535fdda8..cf123791eba9ac 100644 --- a/Include/internal/pycore_runtime.h +++ b/Include/internal/pycore_runtime.h @@ -172,7 +172,7 @@ typedef struct pyruntimestate { #if defined(__EMSCRIPTEN__) && defined(PY_CALL_TRAMPOLINE) // Used in "Python/emscripten_trampoline.c" to choose between type // reflection trampoline and EM_JS trampoline. - bool wasm_type_reflection_available; + int (*emscripten_count_args_function)(PyCFunctionWithKeywords func); #endif /* All the objects that are shared by the runtime's interpreters. */ diff --git a/Include/internal/pycore_tracemalloc.h b/Include/internal/pycore_tracemalloc.h index 7ddc5bac5d10af..572e8025876319 100644 --- a/Include/internal/pycore_tracemalloc.h +++ b/Include/internal/pycore_tracemalloc.h @@ -11,10 +11,6 @@ extern "C" { #include "pycore_hashtable.h" // _Py_hashtable_t -/* Trace memory blocks allocated by PyMem_RawMalloc() */ -#define TRACE_RAW_MALLOC - - struct _PyTraceMalloc_Config { /* Module initialized? Variable protected by the GIL */ @@ -25,7 +21,7 @@ struct _PyTraceMalloc_Config { } initialized; /* Is tracemalloc tracing memory allocations? - Variable protected by the GIL */ + Variable protected by the TABLES_LOCK(). */ int tracing; /* limit of the number of frames in a traceback, 1 by default. 
@@ -74,9 +70,7 @@ struct _tracemalloc_runtime_state { PyMemAllocatorEx obj; } allocators; -#if defined(TRACE_RAW_MALLOC) - PyThread_type_lock tables_lock; -#endif + PyMutex tables_lock; /* Size in bytes of currently traced memory. Protected by TABLES_LOCK(). */ size_t traced_memory; @@ -85,14 +79,14 @@ struct _tracemalloc_runtime_state { size_t peak_traced_memory; /* Hash table used as a set to intern filenames: PyObject* => PyObject*. - Protected by the GIL */ + Protected by the TABLES_LOCK(). */ _Py_hashtable_t *filenames; /* Buffer to store a new traceback in traceback_new(). - Protected by the GIL. */ + Protected by the TABLES_LOCK(). */ struct tracemalloc_traceback *traceback; /* Hash table used as a set to intern tracebacks: traceback_t* => traceback_t* - Protected by the GIL */ + Protected by the TABLES_LOCK(). */ _Py_hashtable_t *tracebacks; /* pointer (void*) => trace (trace_t*). Protected by TABLES_LOCK(). */ @@ -144,7 +138,7 @@ extern PyObject* _PyTraceMalloc_GetTraces(void); extern PyObject* _PyTraceMalloc_GetObjectTraceback(PyObject *obj); /* Initialize tracemalloc */ -extern int _PyTraceMalloc_Init(void); +extern PyStatus _PyTraceMalloc_Init(void); /* Start tracemalloc */ extern int _PyTraceMalloc_Start(int max_nframe); diff --git a/Include/internal/pycore_tstate.h b/Include/internal/pycore_tstate.h index b8bea72baeaaf5..74e1452763e56c 100644 --- a/Include/internal/pycore_tstate.h +++ b/Include/internal/pycore_tstate.h @@ -22,6 +22,7 @@ typedef struct _PyThreadStateImpl { PyThreadState base; PyObject *asyncio_running_loop; // Strong reference + PyObject *asyncio_running_task; // Strong reference struct _qsbr_thread_state *qsbr; // only used by free-threaded build struct llist_node mem_free_queue; // delayed free queue diff --git a/Include/internal/pycore_tuple.h b/Include/internal/pycore_tuple.h index 82b875241f4116..dc68d6648b9ec8 100644 --- a/Include/internal/pycore_tuple.h +++ b/Include/internal/pycore_tuple.h @@ -21,7 +21,7 @@ extern PyStatus _PyTuple_InitGlobalObjects(PyInterpreterState *); #define _PyTuple_ITEMS(op) _Py_RVALUE(_PyTuple_CAST(op)->ob_item) PyAPI_FUNC(PyObject *)_PyTuple_FromArray(PyObject *const *, Py_ssize_t); -PyAPI_FUNC(PyObject *)_PyTuple_FromStackRefSteal(const union _PyStackRef *, Py_ssize_t); +PyAPI_FUNC(PyObject *)_PyTuple_FromStackRefStealOnSuccess(const union _PyStackRef *, Py_ssize_t); PyAPI_FUNC(PyObject *)_PyTuple_FromArraySteal(PyObject *const *, Py_ssize_t); typedef struct { diff --git a/Include/internal/pycore_uniqueid.h b/Include/internal/pycore_uniqueid.h index d3db49ddb78103..9d3c866a704894 100644 --- a/Include/internal/pycore_uniqueid.h +++ b/Include/internal/pycore_uniqueid.h @@ -16,7 +16,7 @@ extern "C" { // Per-thread reference counting is used along with deferred reference // counting to avoid scaling bottlenecks due to reference count contention. // -// An id of -1 is used to indicate that an object doesn't use per-thread +// An id of 0 is used to indicate that an object doesn't use per-thread // refcounting. This value is used when the object is finalized by the GC // and during interpreter shutdown to allow the object to be // deallocated promptly when the object's refcount reaches zero. @@ -45,6 +45,8 @@ struct _Py_unique_id_pool { Py_ssize_t size; }; +#define _Py_INVALID_UNIQUE_ID 0 + // Assigns the next id from the pool of ids. 
extern Py_ssize_t _PyObject_AssignUniqueId(PyObject *obj); @@ -65,7 +67,7 @@ extern void _PyObject_FinalizePerThreadRefcounts(_PyThreadStateImpl *tstate); extern void _PyObject_FinalizeUniqueIdPool(PyInterpreterState *interp); // Increfs the object, resizing the thread-local refcount array if necessary. -PyAPI_FUNC(void) _PyObject_ThreadIncrefSlow(PyObject *obj, Py_ssize_t unique_id); +PyAPI_FUNC(void) _PyObject_ThreadIncrefSlow(PyObject *obj, size_t idx); #endif /* Py_GIL_DISABLED */ diff --git a/Include/internal/pycore_uop_ids.h b/Include/internal/pycore_uop_ids.h index 92515b4230ccb4..7a6c0d22fe24e5 100644 --- a/Include/internal/pycore_uop_ids.h +++ b/Include/internal/pycore_uop_ids.h @@ -15,16 +15,17 @@ extern "C" { #define _BINARY_OP_ADD_FLOAT 303 #define _BINARY_OP_ADD_INT 304 #define _BINARY_OP_ADD_UNICODE 305 -#define _BINARY_OP_INPLACE_ADD_UNICODE 306 -#define _BINARY_OP_MULTIPLY_FLOAT 307 -#define _BINARY_OP_MULTIPLY_INT 308 -#define _BINARY_OP_SUBTRACT_FLOAT 309 -#define _BINARY_OP_SUBTRACT_INT 310 -#define _BINARY_SLICE 311 -#define _BINARY_SUBSCR 312 -#define _BINARY_SUBSCR_CHECK_FUNC 313 +#define _BINARY_OP_EXTEND 306 +#define _BINARY_OP_INPLACE_ADD_UNICODE 307 +#define _BINARY_OP_MULTIPLY_FLOAT 308 +#define _BINARY_OP_MULTIPLY_INT 309 +#define _BINARY_OP_SUBTRACT_FLOAT 310 +#define _BINARY_OP_SUBTRACT_INT 311 +#define _BINARY_SLICE 312 +#define _BINARY_SUBSCR 313 +#define _BINARY_SUBSCR_CHECK_FUNC 314 #define _BINARY_SUBSCR_DICT BINARY_SUBSCR_DICT -#define _BINARY_SUBSCR_INIT_CALL 314 +#define _BINARY_SUBSCR_INIT_CALL 315 #define _BINARY_SUBSCR_LIST_INT BINARY_SUBSCR_LIST_INT #define _BINARY_SUBSCR_STR_INT BINARY_SUBSCR_STR_INT #define _BINARY_SUBSCR_TUPLE_INT BINARY_SUBSCR_TUPLE_INT @@ -34,121 +35,123 @@ extern "C" { #define _BUILD_SLICE BUILD_SLICE #define _BUILD_STRING BUILD_STRING #define _BUILD_TUPLE BUILD_TUPLE -#define _CALL_BUILTIN_CLASS 315 -#define _CALL_BUILTIN_FAST 316 -#define _CALL_BUILTIN_FAST_WITH_KEYWORDS 317 -#define _CALL_BUILTIN_O 318 +#define _CALL_BUILTIN_CLASS 316 +#define _CALL_BUILTIN_FAST 317 +#define _CALL_BUILTIN_FAST_WITH_KEYWORDS 318 +#define _CALL_BUILTIN_O 319 #define _CALL_INTRINSIC_1 CALL_INTRINSIC_1 #define _CALL_INTRINSIC_2 CALL_INTRINSIC_2 #define _CALL_ISINSTANCE CALL_ISINSTANCE -#define _CALL_KW_NON_PY 319 +#define _CALL_KW_NON_PY 320 #define _CALL_LEN CALL_LEN #define _CALL_LIST_APPEND CALL_LIST_APPEND -#define _CALL_METHOD_DESCRIPTOR_FAST 320 -#define _CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS 321 -#define _CALL_METHOD_DESCRIPTOR_NOARGS 322 -#define _CALL_METHOD_DESCRIPTOR_O 323 -#define _CALL_NON_PY_GENERAL 324 -#define _CALL_STR_1 325 -#define _CALL_TUPLE_1 326 +#define _CALL_METHOD_DESCRIPTOR_FAST 321 +#define _CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS 322 +#define _CALL_METHOD_DESCRIPTOR_NOARGS 323 +#define _CALL_METHOD_DESCRIPTOR_O 324 +#define _CALL_NON_PY_GENERAL 325 +#define _CALL_STR_1 326 +#define _CALL_TUPLE_1 327 #define _CALL_TYPE_1 CALL_TYPE_1 -#define _CHECK_AND_ALLOCATE_OBJECT 327 -#define _CHECK_ATTR_CLASS 328 -#define _CHECK_ATTR_METHOD_LAZY_DICT 329 -#define _CHECK_ATTR_MODULE_PUSH_KEYS 330 -#define _CHECK_ATTR_WITH_HINT 331 -#define _CHECK_CALL_BOUND_METHOD_EXACT_ARGS 332 +#define _CHECK_AND_ALLOCATE_OBJECT 328 +#define _CHECK_ATTR_CLASS 329 +#define _CHECK_ATTR_METHOD_LAZY_DICT 330 +#define _CHECK_ATTR_MODULE_PUSH_KEYS 331 +#define _CHECK_ATTR_WITH_HINT 332 +#define _CHECK_CALL_BOUND_METHOD_EXACT_ARGS 333 #define _CHECK_EG_MATCH CHECK_EG_MATCH #define _CHECK_EXC_MATCH CHECK_EXC_MATCH -#define _CHECK_FUNCTION 333 
-#define _CHECK_FUNCTION_EXACT_ARGS 334 -#define _CHECK_FUNCTION_VERSION 335 -#define _CHECK_FUNCTION_VERSION_INLINE 336 -#define _CHECK_FUNCTION_VERSION_KW 337 -#define _CHECK_IS_NOT_PY_CALLABLE 338 -#define _CHECK_IS_NOT_PY_CALLABLE_KW 339 -#define _CHECK_MANAGED_OBJECT_HAS_VALUES 340 -#define _CHECK_METHOD_VERSION 341 -#define _CHECK_METHOD_VERSION_KW 342 -#define _CHECK_PEP_523 343 -#define _CHECK_PERIODIC 344 -#define _CHECK_PERIODIC_IF_NOT_YIELD_FROM 345 -#define _CHECK_STACK_SPACE 346 -#define _CHECK_STACK_SPACE_OPERAND 347 -#define _CHECK_VALIDITY 348 -#define _CHECK_VALIDITY_AND_SET_IP 349 -#define _COMPARE_OP 350 -#define _COMPARE_OP_FLOAT 351 -#define _COMPARE_OP_INT 352 -#define _COMPARE_OP_STR 353 -#define _CONTAINS_OP 354 +#define _CHECK_FUNCTION 334 +#define _CHECK_FUNCTION_EXACT_ARGS 335 +#define _CHECK_FUNCTION_VERSION 336 +#define _CHECK_FUNCTION_VERSION_INLINE 337 +#define _CHECK_FUNCTION_VERSION_KW 338 +#define _CHECK_IS_NOT_PY_CALLABLE 339 +#define _CHECK_IS_NOT_PY_CALLABLE_KW 340 +#define _CHECK_MANAGED_OBJECT_HAS_VALUES 341 +#define _CHECK_METHOD_VERSION 342 +#define _CHECK_METHOD_VERSION_KW 343 +#define _CHECK_PEP_523 344 +#define _CHECK_PERIODIC 345 +#define _CHECK_PERIODIC_IF_NOT_YIELD_FROM 346 +#define _CHECK_STACK_SPACE 347 +#define _CHECK_STACK_SPACE_OPERAND 348 +#define _CHECK_VALIDITY 349 +#define _CHECK_VALIDITY_AND_SET_IP 350 +#define _COMPARE_OP 351 +#define _COMPARE_OP_FLOAT 352 +#define _COMPARE_OP_INT 353 +#define _COMPARE_OP_STR 354 +#define _CONTAINS_OP 355 #define _CONTAINS_OP_DICT CONTAINS_OP_DICT #define _CONTAINS_OP_SET CONTAINS_OP_SET #define _CONVERT_VALUE CONVERT_VALUE #define _COPY COPY #define _COPY_FREE_VARS COPY_FREE_VARS -#define _CREATE_INIT_FRAME 355 +#define _CREATE_INIT_FRAME 356 #define _DELETE_ATTR DELETE_ATTR #define _DELETE_DEREF DELETE_DEREF #define _DELETE_FAST DELETE_FAST #define _DELETE_GLOBAL DELETE_GLOBAL #define _DELETE_NAME DELETE_NAME #define _DELETE_SUBSCR DELETE_SUBSCR -#define _DEOPT 356 +#define _DEOPT 357 #define _DICT_MERGE DICT_MERGE #define _DICT_UPDATE DICT_UPDATE -#define _DO_CALL 357 -#define _DO_CALL_FUNCTION_EX 358 -#define _DO_CALL_KW 359 -#define _DYNAMIC_EXIT 360 +#define _DO_CALL 358 +#define _DO_CALL_FUNCTION_EX 359 +#define _DO_CALL_KW 360 +#define _DYNAMIC_EXIT 361 +#define _END_FOR END_FOR #define _END_SEND END_SEND -#define _ERROR_POP_N 361 +#define _ERROR_POP_N 362 #define _EXIT_INIT_CHECK EXIT_INIT_CHECK -#define _EXPAND_METHOD 362 -#define _EXPAND_METHOD_KW 363 -#define _FATAL_ERROR 364 +#define _EXPAND_METHOD 363 +#define _EXPAND_METHOD_KW 364 +#define _FATAL_ERROR 365 #define _FORMAT_SIMPLE FORMAT_SIMPLE #define _FORMAT_WITH_SPEC FORMAT_WITH_SPEC -#define _FOR_ITER 365 -#define _FOR_ITER_GEN_FRAME 366 -#define _FOR_ITER_TIER_TWO 367 +#define _FOR_ITER 366 +#define _FOR_ITER_GEN_FRAME 367 +#define _FOR_ITER_TIER_TWO 368 #define _GET_AITER GET_AITER #define _GET_ANEXT GET_ANEXT #define _GET_AWAITABLE GET_AWAITABLE #define _GET_ITER GET_ITER #define _GET_LEN GET_LEN #define _GET_YIELD_FROM_ITER GET_YIELD_FROM_ITER -#define _GUARD_BOTH_FLOAT 368 -#define _GUARD_BOTH_INT 369 -#define _GUARD_BOTH_UNICODE 370 -#define _GUARD_BUILTINS_VERSION_PUSH_KEYS 371 -#define _GUARD_DORV_NO_DICT 372 -#define _GUARD_DORV_VALUES_INST_ATTR_FROM_DICT 373 -#define _GUARD_GLOBALS_VERSION 374 -#define _GUARD_GLOBALS_VERSION_PUSH_KEYS 375 -#define _GUARD_IS_FALSE_POP 376 -#define _GUARD_IS_NONE_POP 377 -#define _GUARD_IS_NOT_NONE_POP 378 -#define _GUARD_IS_TRUE_POP 379 -#define _GUARD_KEYS_VERSION 380 -#define 
_GUARD_NOS_FLOAT 381 -#define _GUARD_NOS_INT 382 -#define _GUARD_NOT_EXHAUSTED_LIST 383 -#define _GUARD_NOT_EXHAUSTED_RANGE 384 -#define _GUARD_NOT_EXHAUSTED_TUPLE 385 -#define _GUARD_TOS_FLOAT 386 -#define _GUARD_TOS_INT 387 -#define _GUARD_TYPE_VERSION 388 -#define _GUARD_TYPE_VERSION_AND_LOCK 389 +#define _GUARD_BINARY_OP_EXTEND 369 +#define _GUARD_BOTH_FLOAT 370 +#define _GUARD_BOTH_INT 371 +#define _GUARD_BOTH_UNICODE 372 +#define _GUARD_BUILTINS_VERSION_PUSH_KEYS 373 +#define _GUARD_DORV_NO_DICT 374 +#define _GUARD_DORV_VALUES_INST_ATTR_FROM_DICT 375 +#define _GUARD_GLOBALS_VERSION 376 +#define _GUARD_GLOBALS_VERSION_PUSH_KEYS 377 +#define _GUARD_IS_FALSE_POP 378 +#define _GUARD_IS_NONE_POP 379 +#define _GUARD_IS_NOT_NONE_POP 380 +#define _GUARD_IS_TRUE_POP 381 +#define _GUARD_KEYS_VERSION 382 +#define _GUARD_NOS_FLOAT 383 +#define _GUARD_NOS_INT 384 +#define _GUARD_NOT_EXHAUSTED_LIST 385 +#define _GUARD_NOT_EXHAUSTED_RANGE 386 +#define _GUARD_NOT_EXHAUSTED_TUPLE 387 +#define _GUARD_TOS_FLOAT 388 +#define _GUARD_TOS_INT 389 +#define _GUARD_TYPE_VERSION 390 +#define _GUARD_TYPE_VERSION_AND_LOCK 391 #define _IMPORT_FROM IMPORT_FROM #define _IMPORT_NAME IMPORT_NAME -#define _INIT_CALL_BOUND_METHOD_EXACT_ARGS 390 -#define _INIT_CALL_PY_EXACT_ARGS 391 -#define _INIT_CALL_PY_EXACT_ARGS_0 392 -#define _INIT_CALL_PY_EXACT_ARGS_1 393 -#define _INIT_CALL_PY_EXACT_ARGS_2 394 -#define _INIT_CALL_PY_EXACT_ARGS_3 395 -#define _INIT_CALL_PY_EXACT_ARGS_4 396 +#define _INIT_CALL_BOUND_METHOD_EXACT_ARGS 392 +#define _INIT_CALL_PY_EXACT_ARGS 393 +#define _INIT_CALL_PY_EXACT_ARGS_0 394 +#define _INIT_CALL_PY_EXACT_ARGS_1 395 +#define _INIT_CALL_PY_EXACT_ARGS_2 396 +#define _INIT_CALL_PY_EXACT_ARGS_3 397 +#define _INIT_CALL_PY_EXACT_ARGS_4 398 #define _INSTRUMENTED_CALL_FUNCTION_EX INSTRUMENTED_CALL_FUNCTION_EX #define _INSTRUMENTED_CALL_KW INSTRUMENTED_CALL_KW #define _INSTRUMENTED_FOR_ITER INSTRUMENTED_FOR_ITER @@ -161,143 +164,136 @@ extern "C" { #define _INSTRUMENTED_POP_JUMP_IF_NONE INSTRUMENTED_POP_JUMP_IF_NONE #define _INSTRUMENTED_POP_JUMP_IF_NOT_NONE INSTRUMENTED_POP_JUMP_IF_NOT_NONE #define _INSTRUMENTED_POP_JUMP_IF_TRUE INSTRUMENTED_POP_JUMP_IF_TRUE -#define _INTERNAL_INCREMENT_OPT_COUNTER 397 -#define _IS_NONE 398 +#define _IS_NONE 399 #define _IS_OP IS_OP -#define _ITER_CHECK_LIST 399 -#define _ITER_CHECK_RANGE 400 -#define _ITER_CHECK_TUPLE 401 -#define _ITER_JUMP_LIST 402 -#define _ITER_JUMP_RANGE 403 -#define _ITER_JUMP_TUPLE 404 -#define _ITER_NEXT_LIST 405 -#define _ITER_NEXT_RANGE 406 -#define _ITER_NEXT_TUPLE 407 -#define _JUMP_TO_TOP 408 +#define _ITER_CHECK_LIST 400 +#define _ITER_CHECK_RANGE 401 +#define _ITER_CHECK_TUPLE 402 +#define _ITER_JUMP_LIST 403 +#define _ITER_JUMP_RANGE 404 +#define _ITER_JUMP_TUPLE 405 +#define _ITER_NEXT_LIST 406 +#define _ITER_NEXT_RANGE 407 +#define _ITER_NEXT_TUPLE 408 +#define _JUMP_TO_TOP 409 #define _LIST_APPEND LIST_APPEND #define _LIST_EXTEND LIST_EXTEND -#define _LOAD_ATTR 409 -#define _LOAD_ATTR_CLASS 410 -#define _LOAD_ATTR_CLASS_0 411 -#define _LOAD_ATTR_CLASS_1 412 +#define _LOAD_ATTR 410 +#define _LOAD_ATTR_CLASS 411 #define _LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN -#define _LOAD_ATTR_INSTANCE_VALUE 413 -#define _LOAD_ATTR_INSTANCE_VALUE_0 414 -#define _LOAD_ATTR_INSTANCE_VALUE_1 415 -#define _LOAD_ATTR_METHOD_LAZY_DICT 416 -#define _LOAD_ATTR_METHOD_NO_DICT 417 -#define _LOAD_ATTR_METHOD_WITH_VALUES 418 -#define _LOAD_ATTR_MODULE 419 -#define _LOAD_ATTR_MODULE_FROM_KEYS 420 -#define 
_LOAD_ATTR_NONDESCRIPTOR_NO_DICT 421 -#define _LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES 422 -#define _LOAD_ATTR_PROPERTY_FRAME 423 -#define _LOAD_ATTR_SLOT 424 -#define _LOAD_ATTR_SLOT_0 425 -#define _LOAD_ATTR_SLOT_1 426 -#define _LOAD_ATTR_WITH_HINT 427 +#define _LOAD_ATTR_INSTANCE_VALUE 412 +#define _LOAD_ATTR_METHOD_LAZY_DICT 413 +#define _LOAD_ATTR_METHOD_NO_DICT 414 +#define _LOAD_ATTR_METHOD_WITH_VALUES 415 +#define _LOAD_ATTR_MODULE 416 +#define _LOAD_ATTR_MODULE_FROM_KEYS 417 +#define _LOAD_ATTR_NONDESCRIPTOR_NO_DICT 418 +#define _LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES 419 +#define _LOAD_ATTR_PROPERTY_FRAME 420 +#define _LOAD_ATTR_SLOT 421 +#define _LOAD_ATTR_WITH_HINT 422 #define _LOAD_BUILD_CLASS LOAD_BUILD_CLASS -#define _LOAD_BYTECODE 428 +#define _LOAD_BYTECODE 423 #define _LOAD_COMMON_CONSTANT LOAD_COMMON_CONSTANT #define _LOAD_CONST LOAD_CONST #define _LOAD_CONST_IMMORTAL LOAD_CONST_IMMORTAL -#define _LOAD_CONST_INLINE 429 -#define _LOAD_CONST_INLINE_BORROW 430 -#define _LOAD_CONST_INLINE_BORROW_WITH_NULL 431 -#define _LOAD_CONST_INLINE_WITH_NULL 432 +#define _LOAD_CONST_INLINE 424 +#define _LOAD_CONST_INLINE_BORROW 425 +#define _LOAD_CONST_MORTAL LOAD_CONST_MORTAL #define _LOAD_DEREF LOAD_DEREF -#define _LOAD_FAST 433 -#define _LOAD_FAST_0 434 -#define _LOAD_FAST_1 435 -#define _LOAD_FAST_2 436 -#define _LOAD_FAST_3 437 -#define _LOAD_FAST_4 438 -#define _LOAD_FAST_5 439 -#define _LOAD_FAST_6 440 -#define _LOAD_FAST_7 441 +#define _LOAD_FAST 426 +#define _LOAD_FAST_0 427 +#define _LOAD_FAST_1 428 +#define _LOAD_FAST_2 429 +#define _LOAD_FAST_3 430 +#define _LOAD_FAST_4 431 +#define _LOAD_FAST_5 432 +#define _LOAD_FAST_6 433 +#define _LOAD_FAST_7 434 #define _LOAD_FAST_AND_CLEAR LOAD_FAST_AND_CLEAR #define _LOAD_FAST_CHECK LOAD_FAST_CHECK #define _LOAD_FAST_LOAD_FAST LOAD_FAST_LOAD_FAST #define _LOAD_FROM_DICT_OR_DEREF LOAD_FROM_DICT_OR_DEREF #define _LOAD_FROM_DICT_OR_GLOBALS LOAD_FROM_DICT_OR_GLOBALS -#define _LOAD_GLOBAL 442 -#define _LOAD_GLOBAL_BUILTINS 443 -#define _LOAD_GLOBAL_BUILTINS_FROM_KEYS 444 -#define _LOAD_GLOBAL_MODULE 445 -#define _LOAD_GLOBAL_MODULE_FROM_KEYS 446 +#define _LOAD_GLOBAL 435 +#define _LOAD_GLOBAL_BUILTINS 436 +#define _LOAD_GLOBAL_BUILTINS_FROM_KEYS 437 +#define _LOAD_GLOBAL_MODULE 438 +#define _LOAD_GLOBAL_MODULE_FROM_KEYS 439 #define _LOAD_LOCALS LOAD_LOCALS #define _LOAD_NAME LOAD_NAME -#define _LOAD_SMALL_INT 447 -#define _LOAD_SMALL_INT_0 448 -#define _LOAD_SMALL_INT_1 449 -#define _LOAD_SMALL_INT_2 450 -#define _LOAD_SMALL_INT_3 451 +#define _LOAD_SMALL_INT 440 +#define _LOAD_SMALL_INT_0 441 +#define _LOAD_SMALL_INT_1 442 +#define _LOAD_SMALL_INT_2 443 +#define _LOAD_SMALL_INT_3 444 #define _LOAD_SPECIAL LOAD_SPECIAL #define _LOAD_SUPER_ATTR_ATTR LOAD_SUPER_ATTR_ATTR #define _LOAD_SUPER_ATTR_METHOD LOAD_SUPER_ATTR_METHOD -#define _MAKE_CALLARGS_A_TUPLE 452 +#define _MAKE_CALLARGS_A_TUPLE 445 #define _MAKE_CELL MAKE_CELL #define _MAKE_FUNCTION MAKE_FUNCTION -#define _MAKE_WARM 453 +#define _MAKE_WARM 446 #define _MAP_ADD MAP_ADD #define _MATCH_CLASS MATCH_CLASS #define _MATCH_KEYS MATCH_KEYS #define _MATCH_MAPPING MATCH_MAPPING #define _MATCH_SEQUENCE MATCH_SEQUENCE -#define _MAYBE_EXPAND_METHOD 454 -#define _MAYBE_EXPAND_METHOD_KW 455 -#define _MONITOR_CALL 456 -#define _MONITOR_JUMP_BACKWARD 457 -#define _MONITOR_RESUME 458 +#define _MAYBE_EXPAND_METHOD 447 +#define _MAYBE_EXPAND_METHOD_KW 448 +#define _MONITOR_CALL 449 +#define _MONITOR_JUMP_BACKWARD 450 +#define _MONITOR_RESUME 451 #define _NOP NOP #define _POP_EXCEPT POP_EXCEPT -#define 
_POP_JUMP_IF_FALSE 459 -#define _POP_JUMP_IF_TRUE 460 +#define _POP_JUMP_IF_FALSE 452 +#define _POP_JUMP_IF_TRUE 453 #define _POP_TOP POP_TOP -#define _POP_TOP_LOAD_CONST_INLINE_BORROW 461 +#define _POP_TOP_LOAD_CONST_INLINE_BORROW 454 #define _PUSH_EXC_INFO PUSH_EXC_INFO -#define _PUSH_FRAME 462 +#define _PUSH_FRAME 455 #define _PUSH_NULL PUSH_NULL -#define _PY_FRAME_GENERAL 463 -#define _PY_FRAME_KW 464 -#define _QUICKEN_RESUME 465 -#define _REPLACE_WITH_TRUE 466 +#define _PUSH_NULL_CONDITIONAL 456 +#define _PY_FRAME_GENERAL 457 +#define _PY_FRAME_KW 458 +#define _QUICKEN_RESUME 459 +#define _REPLACE_WITH_TRUE 460 #define _RESUME_CHECK RESUME_CHECK #define _RETURN_GENERATOR RETURN_GENERATOR #define _RETURN_VALUE RETURN_VALUE -#define _SAVE_RETURN_OFFSET 467 -#define _SEND 468 -#define _SEND_GEN_FRAME 469 +#define _SAVE_RETURN_OFFSET 461 +#define _SEND 462 +#define _SEND_GEN_FRAME 463 #define _SETUP_ANNOTATIONS SETUP_ANNOTATIONS #define _SET_ADD SET_ADD #define _SET_FUNCTION_ATTRIBUTE SET_FUNCTION_ATTRIBUTE #define _SET_UPDATE SET_UPDATE -#define _START_EXECUTOR 470 -#define _STORE_ATTR 471 -#define _STORE_ATTR_INSTANCE_VALUE 472 -#define _STORE_ATTR_SLOT 473 -#define _STORE_ATTR_WITH_HINT 474 +#define _START_EXECUTOR 464 +#define _STORE_ATTR 465 +#define _STORE_ATTR_INSTANCE_VALUE 466 +#define _STORE_ATTR_SLOT 467 +#define _STORE_ATTR_WITH_HINT 468 #define _STORE_DEREF STORE_DEREF -#define _STORE_FAST 475 -#define _STORE_FAST_0 476 -#define _STORE_FAST_1 477 -#define _STORE_FAST_2 478 -#define _STORE_FAST_3 479 -#define _STORE_FAST_4 480 -#define _STORE_FAST_5 481 -#define _STORE_FAST_6 482 -#define _STORE_FAST_7 483 +#define _STORE_FAST 469 +#define _STORE_FAST_0 470 +#define _STORE_FAST_1 471 +#define _STORE_FAST_2 472 +#define _STORE_FAST_3 473 +#define _STORE_FAST_4 474 +#define _STORE_FAST_5 475 +#define _STORE_FAST_6 476 +#define _STORE_FAST_7 477 #define _STORE_FAST_LOAD_FAST STORE_FAST_LOAD_FAST #define _STORE_FAST_STORE_FAST STORE_FAST_STORE_FAST #define _STORE_GLOBAL STORE_GLOBAL #define _STORE_NAME STORE_NAME -#define _STORE_SLICE 484 -#define _STORE_SUBSCR 485 +#define _STORE_SLICE 478 +#define _STORE_SUBSCR 479 #define _STORE_SUBSCR_DICT STORE_SUBSCR_DICT #define _STORE_SUBSCR_LIST_INT STORE_SUBSCR_LIST_INT #define _SWAP SWAP -#define _TIER2_RESUME_CHECK 486 -#define _TO_BOOL 487 +#define _TIER2_RESUME_CHECK 480 +#define _TO_BOOL 481 #define _TO_BOOL_BOOL TO_BOOL_BOOL #define _TO_BOOL_INT TO_BOOL_INT #define _TO_BOOL_LIST TO_BOOL_LIST @@ -307,13 +303,13 @@ extern "C" { #define _UNARY_NEGATIVE UNARY_NEGATIVE #define _UNARY_NOT UNARY_NOT #define _UNPACK_EX UNPACK_EX -#define _UNPACK_SEQUENCE 488 +#define _UNPACK_SEQUENCE 482 #define _UNPACK_SEQUENCE_LIST UNPACK_SEQUENCE_LIST #define _UNPACK_SEQUENCE_TUPLE UNPACK_SEQUENCE_TUPLE #define _UNPACK_SEQUENCE_TWO_TUPLE UNPACK_SEQUENCE_TWO_TUPLE #define _WITH_EXCEPT_START WITH_EXCEPT_START #define _YIELD_VALUE YIELD_VALUE -#define MAX_UOP_ID 488 +#define MAX_UOP_ID 482 #ifdef __cplusplus } diff --git a/Include/internal/pycore_uop_metadata.h b/Include/internal/pycore_uop_metadata.h index 73fc29eb78a7a4..0ed4c7c3a35436 100644 --- a/Include/internal/pycore_uop_metadata.h +++ b/Include/internal/pycore_uop_metadata.h @@ -35,26 +35,27 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = { [_LOAD_FAST] = HAS_ARG_FLAG | HAS_LOCAL_FLAG | HAS_PURE_FLAG, [_LOAD_FAST_AND_CLEAR] = HAS_ARG_FLAG | HAS_LOCAL_FLAG, [_LOAD_FAST_LOAD_FAST] = HAS_ARG_FLAG | HAS_LOCAL_FLAG, - [_LOAD_CONST] = HAS_ARG_FLAG | HAS_CONST_FLAG | HAS_PURE_FLAG, + [_LOAD_CONST_MORTAL] = 
HAS_ARG_FLAG | HAS_CONST_FLAG, [_LOAD_CONST_IMMORTAL] = HAS_ARG_FLAG | HAS_CONST_FLAG, [_LOAD_SMALL_INT_0] = 0, [_LOAD_SMALL_INT_1] = 0, [_LOAD_SMALL_INT_2] = 0, [_LOAD_SMALL_INT_3] = 0, [_LOAD_SMALL_INT] = HAS_ARG_FLAG, - [_STORE_FAST_0] = HAS_LOCAL_FLAG, - [_STORE_FAST_1] = HAS_LOCAL_FLAG, - [_STORE_FAST_2] = HAS_LOCAL_FLAG, - [_STORE_FAST_3] = HAS_LOCAL_FLAG, - [_STORE_FAST_4] = HAS_LOCAL_FLAG, - [_STORE_FAST_5] = HAS_LOCAL_FLAG, - [_STORE_FAST_6] = HAS_LOCAL_FLAG, - [_STORE_FAST_7] = HAS_LOCAL_FLAG, - [_STORE_FAST] = HAS_ARG_FLAG | HAS_LOCAL_FLAG, - [_STORE_FAST_LOAD_FAST] = HAS_ARG_FLAG | HAS_LOCAL_FLAG, - [_STORE_FAST_STORE_FAST] = HAS_ARG_FLAG | HAS_LOCAL_FLAG, + [_STORE_FAST_0] = HAS_LOCAL_FLAG | HAS_ESCAPES_FLAG, + [_STORE_FAST_1] = HAS_LOCAL_FLAG | HAS_ESCAPES_FLAG, + [_STORE_FAST_2] = HAS_LOCAL_FLAG | HAS_ESCAPES_FLAG, + [_STORE_FAST_3] = HAS_LOCAL_FLAG | HAS_ESCAPES_FLAG, + [_STORE_FAST_4] = HAS_LOCAL_FLAG | HAS_ESCAPES_FLAG, + [_STORE_FAST_5] = HAS_LOCAL_FLAG | HAS_ESCAPES_FLAG, + [_STORE_FAST_6] = HAS_LOCAL_FLAG | HAS_ESCAPES_FLAG, + [_STORE_FAST_7] = HAS_LOCAL_FLAG | HAS_ESCAPES_FLAG, + [_STORE_FAST] = HAS_ARG_FLAG | HAS_LOCAL_FLAG | HAS_ESCAPES_FLAG, + [_STORE_FAST_LOAD_FAST] = HAS_ARG_FLAG | HAS_LOCAL_FLAG | HAS_ESCAPES_FLAG, + [_STORE_FAST_STORE_FAST] = HAS_ARG_FLAG | HAS_LOCAL_FLAG | HAS_ESCAPES_FLAG, [_POP_TOP] = HAS_PURE_FLAG, [_PUSH_NULL] = HAS_PURE_FLAG, + [_END_FOR] = HAS_ESCAPES_FLAG | HAS_NO_SAVE_IP_FLAG, [_END_SEND] = HAS_PURE_FLAG, [_UNARY_NEGATIVE] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_UNARY_NOT] = HAS_PURE_FLAG, @@ -81,19 +82,21 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = { [_GUARD_BOTH_UNICODE] = HAS_EXIT_FLAG, [_BINARY_OP_ADD_UNICODE] = HAS_ERROR_FLAG | HAS_PURE_FLAG, [_BINARY_OP_INPLACE_ADD_UNICODE] = HAS_LOCAL_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG, + [_GUARD_BINARY_OP_EXTEND] = HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG, + [_BINARY_OP_EXTEND] = HAS_ESCAPES_FLAG | HAS_PURE_FLAG, [_BINARY_SUBSCR] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_BINARY_SLICE] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_STORE_SLICE] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_BINARY_SUBSCR_LIST_INT] = HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG, - [_BINARY_SUBSCR_STR_INT] = HAS_DEOPT_FLAG, - [_BINARY_SUBSCR_TUPLE_INT] = HAS_DEOPT_FLAG, + [_BINARY_SUBSCR_STR_INT] = HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG, + [_BINARY_SUBSCR_TUPLE_INT] = HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG, [_BINARY_SUBSCR_DICT] = HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_BINARY_SUBSCR_CHECK_FUNC] = HAS_DEOPT_FLAG, [_BINARY_SUBSCR_INIT_CALL] = 0, [_LIST_APPEND] = HAS_ARG_FLAG | HAS_ERROR_FLAG, [_SET_ADD] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_STORE_SUBSCR] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, - [_STORE_SUBSCR_LIST_INT] = HAS_DEOPT_FLAG, + [_STORE_SUBSCR_LIST_INT] = HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG, [_STORE_SUBSCR_DICT] = HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_DELETE_SUBSCR] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_CALL_INTRINSIC_1] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, @@ -121,21 +124,22 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = { [_LOAD_LOCALS] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_LOAD_NAME] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_LOAD_GLOBAL] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_PUSH_NULL_CONDITIONAL] = HAS_ARG_FLAG, [_GUARD_GLOBALS_VERSION] = HAS_DEOPT_FLAG, [_GUARD_GLOBALS_VERSION_PUSH_KEYS] = HAS_DEOPT_FLAG, [_GUARD_BUILTINS_VERSION_PUSH_KEYS] = HAS_DEOPT_FLAG, - [_LOAD_GLOBAL_MODULE_FROM_KEYS] = HAS_ARG_FLAG | 
HAS_DEOPT_FLAG, - [_LOAD_GLOBAL_BUILTINS_FROM_KEYS] = HAS_ARG_FLAG | HAS_DEOPT_FLAG, + [_LOAD_GLOBAL_MODULE_FROM_KEYS] = HAS_DEOPT_FLAG, + [_LOAD_GLOBAL_BUILTINS_FROM_KEYS] = HAS_DEOPT_FLAG, [_DELETE_FAST] = HAS_ARG_FLAG | HAS_LOCAL_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, - [_MAKE_CELL] = HAS_ARG_FLAG | HAS_FREE_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG, + [_MAKE_CELL] = HAS_ARG_FLAG | HAS_FREE_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG, [_DELETE_DEREF] = HAS_ARG_FLAG | HAS_FREE_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG, [_LOAD_FROM_DICT_OR_DEREF] = HAS_ARG_FLAG | HAS_FREE_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG, [_LOAD_DEREF] = HAS_ARG_FLAG | HAS_FREE_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_STORE_DEREF] = HAS_ARG_FLAG | HAS_FREE_FLAG | HAS_ESCAPES_FLAG, [_COPY_FREE_VARS] = HAS_ARG_FLAG, [_BUILD_STRING] = HAS_ARG_FLAG | HAS_ERROR_FLAG, - [_BUILD_TUPLE] = HAS_ARG_FLAG | HAS_ERROR_FLAG, - [_BUILD_LIST] = HAS_ARG_FLAG | HAS_ERROR_FLAG, + [_BUILD_TUPLE] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG, + [_BUILD_LIST] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG, [_LIST_EXTEND] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_SET_UPDATE] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_BUILD_SET] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, @@ -145,30 +149,24 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = { [_DICT_MERGE] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_MAP_ADD] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_LOAD_SUPER_ATTR_ATTR] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, - [_LOAD_SUPER_ATTR_METHOD] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_LOAD_SUPER_ATTR_METHOD] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG, [_LOAD_ATTR] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_GUARD_TYPE_VERSION] = HAS_EXIT_FLAG, [_GUARD_TYPE_VERSION_AND_LOCK] = HAS_EXIT_FLAG, [_CHECK_MANAGED_OBJECT_HAS_VALUES] = HAS_DEOPT_FLAG, - [_LOAD_ATTR_INSTANCE_VALUE_0] = HAS_DEOPT_FLAG, - [_LOAD_ATTR_INSTANCE_VALUE_1] = HAS_DEOPT_FLAG, - [_LOAD_ATTR_INSTANCE_VALUE] = HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_OPARG_AND_1_FLAG, + [_LOAD_ATTR_INSTANCE_VALUE] = HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG, [_CHECK_ATTR_MODULE_PUSH_KEYS] = HAS_DEOPT_FLAG, - [_LOAD_ATTR_MODULE_FROM_KEYS] = HAS_ARG_FLAG | HAS_DEOPT_FLAG, + [_LOAD_ATTR_MODULE_FROM_KEYS] = HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG, [_CHECK_ATTR_WITH_HINT] = HAS_EXIT_FLAG, [_LOAD_ATTR_WITH_HINT] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG, - [_LOAD_ATTR_SLOT_0] = HAS_DEOPT_FLAG, - [_LOAD_ATTR_SLOT_1] = HAS_DEOPT_FLAG, - [_LOAD_ATTR_SLOT] = HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_OPARG_AND_1_FLAG, + [_LOAD_ATTR_SLOT] = HAS_DEOPT_FLAG, [_CHECK_ATTR_CLASS] = HAS_EXIT_FLAG, - [_LOAD_ATTR_CLASS_0] = 0, - [_LOAD_ATTR_CLASS_1] = 0, - [_LOAD_ATTR_CLASS] = HAS_ARG_FLAG | HAS_OPARG_AND_1_FLAG, + [_LOAD_ATTR_CLASS] = 0, [_LOAD_ATTR_PROPERTY_FRAME] = HAS_ARG_FLAG | HAS_DEOPT_FLAG, [_GUARD_DORV_NO_DICT] = HAS_EXIT_FLAG, - [_STORE_ATTR_INSTANCE_VALUE] = 0, + [_STORE_ATTR_INSTANCE_VALUE] = HAS_ESCAPES_FLAG, [_STORE_ATTR_WITH_HINT] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG, - [_STORE_ATTR_SLOT] = HAS_DEOPT_FLAG, + [_STORE_ATTR_SLOT] = HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG, [_COMPARE_OP] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_COMPARE_OP_FLOAT] = HAS_ARG_FLAG, 
[_COMPARE_OP_INT] = HAS_ARG_FLAG | HAS_DEOPT_FLAG, @@ -211,16 +209,16 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = { [_LOAD_ATTR_NONDESCRIPTOR_NO_DICT] = HAS_ARG_FLAG, [_CHECK_ATTR_METHOD_LAZY_DICT] = HAS_DEOPT_FLAG, [_LOAD_ATTR_METHOD_LAZY_DICT] = HAS_ARG_FLAG, - [_MAYBE_EXPAND_METHOD] = HAS_ARG_FLAG, + [_MAYBE_EXPAND_METHOD] = HAS_ARG_FLAG | HAS_ESCAPES_FLAG, [_PY_FRAME_GENERAL] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG, [_CHECK_FUNCTION_VERSION] = HAS_ARG_FLAG | HAS_EXIT_FLAG, [_CHECK_FUNCTION_VERSION_INLINE] = HAS_EXIT_FLAG, [_CHECK_METHOD_VERSION] = HAS_ARG_FLAG | HAS_EXIT_FLAG, - [_EXPAND_METHOD] = HAS_ARG_FLAG, + [_EXPAND_METHOD] = HAS_ARG_FLAG | HAS_ESCAPES_FLAG, [_CHECK_IS_NOT_PY_CALLABLE] = HAS_ARG_FLAG | HAS_EXIT_FLAG, [_CALL_NON_PY_GENERAL] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_CHECK_CALL_BOUND_METHOD_EXACT_ARGS] = HAS_ARG_FLAG | HAS_EXIT_FLAG, - [_INIT_CALL_BOUND_METHOD_EXACT_ARGS] = HAS_ARG_FLAG, + [_INIT_CALL_BOUND_METHOD_EXACT_ARGS] = HAS_ARG_FLAG | HAS_ESCAPES_FLAG, [_CHECK_PEP_523] = HAS_DEOPT_FLAG, [_CHECK_FUNCTION_EXACT_ARGS] = HAS_ARG_FLAG | HAS_EXIT_FLAG, [_CHECK_STACK_SPACE] = HAS_ARG_FLAG | HAS_DEOPT_FLAG, @@ -231,7 +229,7 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = { [_INIT_CALL_PY_EXACT_ARGS_4] = HAS_PURE_FLAG, [_INIT_CALL_PY_EXACT_ARGS] = HAS_ARG_FLAG | HAS_PURE_FLAG, [_PUSH_FRAME] = 0, - [_CALL_TYPE_1] = HAS_ARG_FLAG | HAS_DEOPT_FLAG, + [_CALL_TYPE_1] = HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG, [_CALL_STR_1] = HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_CALL_TUPLE_1] = HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_CHECK_AND_ALLOCATE_OBJECT] = HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG, @@ -243,19 +241,19 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = { [_CALL_BUILTIN_FAST_WITH_KEYWORDS] = HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_CALL_LEN] = HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG, [_CALL_ISINSTANCE] = HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG, - [_CALL_LIST_APPEND] = HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG, + [_CALL_LIST_APPEND] = HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_CALL_METHOD_DESCRIPTOR_O] = HAS_ARG_FLAG | HAS_EXIT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS] = HAS_ARG_FLAG | HAS_EXIT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_CALL_METHOD_DESCRIPTOR_NOARGS] = HAS_ARG_FLAG | HAS_EXIT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_CALL_METHOD_DESCRIPTOR_FAST] = HAS_ARG_FLAG | HAS_EXIT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, - [_MAYBE_EXPAND_METHOD_KW] = HAS_ARG_FLAG, + [_MAYBE_EXPAND_METHOD_KW] = HAS_ARG_FLAG | HAS_ESCAPES_FLAG, [_PY_FRAME_KW] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG, [_CHECK_FUNCTION_VERSION_KW] = HAS_ARG_FLAG | HAS_EXIT_FLAG, [_CHECK_METHOD_VERSION_KW] = HAS_ARG_FLAG | HAS_EXIT_FLAG, - [_EXPAND_METHOD_KW] = HAS_ARG_FLAG, + [_EXPAND_METHOD_KW] = HAS_ARG_FLAG | HAS_ESCAPES_FLAG, [_CHECK_IS_NOT_PY_CALLABLE_KW] = HAS_ARG_FLAG | HAS_EXIT_FLAG, [_CALL_KW_NON_PY] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, - [_MAKE_CALLARGS_A_TUPLE] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG, + [_MAKE_CALLARGS_A_TUPLE] = HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG, [_MAKE_FUNCTION] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, 
[_SET_FUNCTION_ATTRIBUTE] = HAS_ARG_FLAG, [_RETURN_GENERATOR] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, @@ -268,8 +266,8 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = { [_SWAP] = HAS_ARG_FLAG | HAS_PURE_FLAG, [_GUARD_IS_TRUE_POP] = HAS_EXIT_FLAG, [_GUARD_IS_FALSE_POP] = HAS_EXIT_FLAG, - [_GUARD_IS_NONE_POP] = HAS_EXIT_FLAG, - [_GUARD_IS_NOT_NONE_POP] = HAS_EXIT_FLAG, + [_GUARD_IS_NONE_POP] = HAS_EXIT_FLAG | HAS_ESCAPES_FLAG, + [_GUARD_IS_NOT_NONE_POP] = HAS_EXIT_FLAG | HAS_ESCAPES_FLAG, [_JUMP_TO_TOP] = 0, [_SET_IP] = 0, [_CHECK_STACK_SPACE_OPERAND] = HAS_DEOPT_FLAG, @@ -278,16 +276,13 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = { [_CHECK_VALIDITY] = HAS_DEOPT_FLAG, [_LOAD_CONST_INLINE] = HAS_PURE_FLAG, [_LOAD_CONST_INLINE_BORROW] = HAS_PURE_FLAG, - [_POP_TOP_LOAD_CONST_INLINE_BORROW] = HAS_PURE_FLAG, - [_LOAD_CONST_INLINE_WITH_NULL] = HAS_PURE_FLAG, - [_LOAD_CONST_INLINE_BORROW_WITH_NULL] = HAS_PURE_FLAG, + [_POP_TOP_LOAD_CONST_INLINE_BORROW] = HAS_ESCAPES_FLAG | HAS_PURE_FLAG, [_CHECK_FUNCTION] = HAS_DEOPT_FLAG, - [_LOAD_GLOBAL_MODULE] = HAS_ARG_FLAG | HAS_DEOPT_FLAG, - [_LOAD_GLOBAL_BUILTINS] = HAS_ARG_FLAG | HAS_DEOPT_FLAG, - [_LOAD_ATTR_MODULE] = HAS_ARG_FLAG | HAS_DEOPT_FLAG, - [_INTERNAL_INCREMENT_OPT_COUNTER] = 0, + [_LOAD_GLOBAL_MODULE] = HAS_DEOPT_FLAG, + [_LOAD_GLOBAL_BUILTINS] = HAS_DEOPT_FLAG, + [_LOAD_ATTR_MODULE] = HAS_DEOPT_FLAG, [_DYNAMIC_EXIT] = HAS_ESCAPES_FLAG, - [_START_EXECUTOR] = 0, + [_START_EXECUTOR] = HAS_ESCAPES_FLAG, [_MAKE_WARM] = 0, [_FATAL_ERROR] = 0, [_CHECK_VALIDITY_AND_SET_IP] = HAS_DEOPT_FLAG, @@ -308,6 +303,7 @@ const char *const _PyOpcode_uop_name[MAX_UOP_ID+1] = { [_BINARY_OP_ADD_FLOAT] = "_BINARY_OP_ADD_FLOAT", [_BINARY_OP_ADD_INT] = "_BINARY_OP_ADD_INT", [_BINARY_OP_ADD_UNICODE] = "_BINARY_OP_ADD_UNICODE", + [_BINARY_OP_EXTEND] = "_BINARY_OP_EXTEND", [_BINARY_OP_INPLACE_ADD_UNICODE] = "_BINARY_OP_INPLACE_ADD_UNICODE", [_BINARY_OP_MULTIPLY_FLOAT] = "_BINARY_OP_MULTIPLY_FLOAT", [_BINARY_OP_MULTIPLY_INT] = "_BINARY_OP_MULTIPLY_INT", @@ -391,6 +387,7 @@ const char *const _PyOpcode_uop_name[MAX_UOP_ID+1] = { [_DICT_MERGE] = "_DICT_MERGE", [_DICT_UPDATE] = "_DICT_UPDATE", [_DYNAMIC_EXIT] = "_DYNAMIC_EXIT", + [_END_FOR] = "_END_FOR", [_END_SEND] = "_END_SEND", [_ERROR_POP_N] = "_ERROR_POP_N", [_EXIT_INIT_CHECK] = "_EXIT_INIT_CHECK", @@ -408,6 +405,7 @@ const char *const _PyOpcode_uop_name[MAX_UOP_ID+1] = { [_GET_ITER] = "_GET_ITER", [_GET_LEN] = "_GET_LEN", [_GET_YIELD_FROM_ITER] = "_GET_YIELD_FROM_ITER", + [_GUARD_BINARY_OP_EXTEND] = "_GUARD_BINARY_OP_EXTEND", [_GUARD_BOTH_FLOAT] = "_GUARD_BOTH_FLOAT", [_GUARD_BOTH_INT] = "_GUARD_BOTH_INT", [_GUARD_BOTH_UNICODE] = "_GUARD_BOTH_UNICODE", @@ -439,7 +437,6 @@ const char *const _PyOpcode_uop_name[MAX_UOP_ID+1] = { [_INIT_CALL_PY_EXACT_ARGS_2] = "_INIT_CALL_PY_EXACT_ARGS_2", [_INIT_CALL_PY_EXACT_ARGS_3] = "_INIT_CALL_PY_EXACT_ARGS_3", [_INIT_CALL_PY_EXACT_ARGS_4] = "_INIT_CALL_PY_EXACT_ARGS_4", - [_INTERNAL_INCREMENT_OPT_COUNTER] = "_INTERNAL_INCREMENT_OPT_COUNTER", [_IS_NONE] = "_IS_NONE", [_IS_OP] = "_IS_OP", [_ITER_CHECK_LIST] = "_ITER_CHECK_LIST", @@ -453,11 +450,7 @@ const char *const _PyOpcode_uop_name[MAX_UOP_ID+1] = { [_LIST_EXTEND] = "_LIST_EXTEND", [_LOAD_ATTR] = "_LOAD_ATTR", [_LOAD_ATTR_CLASS] = "_LOAD_ATTR_CLASS", - [_LOAD_ATTR_CLASS_0] = "_LOAD_ATTR_CLASS_0", - [_LOAD_ATTR_CLASS_1] = "_LOAD_ATTR_CLASS_1", [_LOAD_ATTR_INSTANCE_VALUE] = "_LOAD_ATTR_INSTANCE_VALUE", - [_LOAD_ATTR_INSTANCE_VALUE_0] = "_LOAD_ATTR_INSTANCE_VALUE_0", - [_LOAD_ATTR_INSTANCE_VALUE_1] = "_LOAD_ATTR_INSTANCE_VALUE_1", 
[_LOAD_ATTR_METHOD_LAZY_DICT] = "_LOAD_ATTR_METHOD_LAZY_DICT", [_LOAD_ATTR_METHOD_NO_DICT] = "_LOAD_ATTR_METHOD_NO_DICT", [_LOAD_ATTR_METHOD_WITH_VALUES] = "_LOAD_ATTR_METHOD_WITH_VALUES", @@ -467,17 +460,13 @@ const char *const _PyOpcode_uop_name[MAX_UOP_ID+1] = { [_LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES] = "_LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES", [_LOAD_ATTR_PROPERTY_FRAME] = "_LOAD_ATTR_PROPERTY_FRAME", [_LOAD_ATTR_SLOT] = "_LOAD_ATTR_SLOT", - [_LOAD_ATTR_SLOT_0] = "_LOAD_ATTR_SLOT_0", - [_LOAD_ATTR_SLOT_1] = "_LOAD_ATTR_SLOT_1", [_LOAD_ATTR_WITH_HINT] = "_LOAD_ATTR_WITH_HINT", [_LOAD_BUILD_CLASS] = "_LOAD_BUILD_CLASS", [_LOAD_COMMON_CONSTANT] = "_LOAD_COMMON_CONSTANT", - [_LOAD_CONST] = "_LOAD_CONST", [_LOAD_CONST_IMMORTAL] = "_LOAD_CONST_IMMORTAL", [_LOAD_CONST_INLINE] = "_LOAD_CONST_INLINE", [_LOAD_CONST_INLINE_BORROW] = "_LOAD_CONST_INLINE_BORROW", - [_LOAD_CONST_INLINE_BORROW_WITH_NULL] = "_LOAD_CONST_INLINE_BORROW_WITH_NULL", - [_LOAD_CONST_INLINE_WITH_NULL] = "_LOAD_CONST_INLINE_WITH_NULL", + [_LOAD_CONST_MORTAL] = "_LOAD_CONST_MORTAL", [_LOAD_DEREF] = "_LOAD_DEREF", [_LOAD_FAST] = "_LOAD_FAST", [_LOAD_FAST_0] = "_LOAD_FAST_0", @@ -525,6 +514,7 @@ const char *const _PyOpcode_uop_name[MAX_UOP_ID+1] = { [_PUSH_EXC_INFO] = "_PUSH_EXC_INFO", [_PUSH_FRAME] = "_PUSH_FRAME", [_PUSH_NULL] = "_PUSH_NULL", + [_PUSH_NULL_CONDITIONAL] = "_PUSH_NULL_CONDITIONAL", [_PY_FRAME_GENERAL] = "_PY_FRAME_GENERAL", [_PY_FRAME_KW] = "_PY_FRAME_KW", [_REPLACE_WITH_TRUE] = "_REPLACE_WITH_TRUE", @@ -615,7 +605,7 @@ int _PyUop_num_popped(int opcode, int oparg) return 0; case _LOAD_FAST_LOAD_FAST: return 0; - case _LOAD_CONST: + case _LOAD_CONST_MORTAL: return 0; case _LOAD_CONST_IMMORTAL: return 0; @@ -655,6 +645,8 @@ int _PyUop_num_popped(int opcode, int oparg) return 1; case _PUSH_NULL: return 0; + case _END_FOR: + return 1; case _END_SEND: return 2; case _UNARY_NEGATIVE: @@ -707,6 +699,10 @@ int _PyUop_num_popped(int opcode, int oparg) return 2; case _BINARY_OP_INPLACE_ADD_UNICODE: return 2; + case _GUARD_BINARY_OP_EXTEND: + return 0; + case _BINARY_OP_EXTEND: + return 2; case _BINARY_SUBSCR: return 2; case _BINARY_SLICE: @@ -787,6 +783,8 @@ int _PyUop_num_popped(int opcode, int oparg) return 0; case _LOAD_GLOBAL: return 0; + case _PUSH_NULL_CONDITIONAL: + return 0; case _GUARD_GLOBALS_VERSION: return 0; case _GUARD_GLOBALS_VERSION_PUSH_KEYS: @@ -845,10 +843,6 @@ int _PyUop_num_popped(int opcode, int oparg) return 0; case _CHECK_MANAGED_OBJECT_HAS_VALUES: return 0; - case _LOAD_ATTR_INSTANCE_VALUE_0: - return 1; - case _LOAD_ATTR_INSTANCE_VALUE_1: - return 1; case _LOAD_ATTR_INSTANCE_VALUE: return 1; case _CHECK_ATTR_MODULE_PUSH_KEYS: @@ -858,19 +852,11 @@ int _PyUop_num_popped(int opcode, int oparg) case _CHECK_ATTR_WITH_HINT: return 0; case _LOAD_ATTR_WITH_HINT: - return 1; - case _LOAD_ATTR_SLOT_0: - return 1; - case _LOAD_ATTR_SLOT_1: - return 1; + return 2; case _LOAD_ATTR_SLOT: return 1; case _CHECK_ATTR_CLASS: return 0; - case _LOAD_ATTR_CLASS_0: - return 1; - case _LOAD_ATTR_CLASS_1: - return 1; case _LOAD_ATTR_CLASS: return 1; case _LOAD_ATTR_PROPERTY_FRAME: @@ -978,7 +964,7 @@ int _PyUop_num_popped(int opcode, int oparg) case _CHECK_METHOD_VERSION: return 0; case _EXPAND_METHOD: - return 2 + oparg; + return 0; case _CHECK_IS_NOT_PY_CALLABLE: return 0; case _CALL_NON_PY_GENERAL: @@ -986,7 +972,7 @@ int _PyUop_num_popped(int opcode, int oparg) case _CHECK_CALL_BOUND_METHOD_EXACT_ARGS: return 0; case _INIT_CALL_BOUND_METHOD_EXACT_ARGS: - return 2 + oparg; + return 0; case _CHECK_PEP_523: return 0; 
case _CHECK_FUNCTION_EXACT_ARGS: @@ -1050,13 +1036,13 @@ int _PyUop_num_popped(int opcode, int oparg) case _CHECK_METHOD_VERSION_KW: return 0; case _EXPAND_METHOD_KW: - return 3 + oparg; + return 0; case _CHECK_IS_NOT_PY_CALLABLE_KW: return 0; case _CALL_KW_NON_PY: return 3 + oparg; case _MAKE_CALLARGS_A_TUPLE: - return 1 + (oparg & 1); + return 2; case _MAKE_FUNCTION: return 1; case _SET_FUNCTION_ATTRIBUTE: @@ -1064,7 +1050,7 @@ int _PyUop_num_popped(int opcode, int oparg) case _RETURN_GENERATOR: return 0; case _BUILD_SLICE: - return 2 + ((oparg == 3) ? 1 : 0); + return oparg; case _CONVERT_VALUE: return 1; case _FORMAT_SIMPLE: @@ -1076,7 +1062,7 @@ int _PyUop_num_popped(int opcode, int oparg) case _BINARY_OP: return 2; case _SWAP: - return 2 + (oparg-2); + return 0; case _GUARD_IS_TRUE_POP: return 1; case _GUARD_IS_FALSE_POP: @@ -1103,10 +1089,6 @@ int _PyUop_num_popped(int opcode, int oparg) return 0; case _POP_TOP_LOAD_CONST_INLINE_BORROW: return 1; - case _LOAD_CONST_INLINE_WITH_NULL: - return 0; - case _LOAD_CONST_INLINE_BORROW_WITH_NULL: - return 0; case _CHECK_FUNCTION: return 0; case _LOAD_GLOBAL_MODULE: @@ -1115,8 +1097,6 @@ int _PyUop_num_popped(int opcode, int oparg) return 0; case _LOAD_ATTR_MODULE: return 1; - case _INTERNAL_INCREMENT_OPT_COUNTER: - return 1; case _DYNAMIC_EXIT: return 0; case _START_EXECUTOR: @@ -1130,7 +1110,7 @@ int _PyUop_num_popped(int opcode, int oparg) case _DEOPT: return 0; case _ERROR_POP_N: - return oparg; + return 0; case _TIER2_RESUME_CHECK: return 0; default: diff --git a/Include/internal/pycore_warnings.h b/Include/internal/pycore_warnings.h index f9f6559312f4ef..672228cd6fbd19 100644 --- a/Include/internal/pycore_warnings.h +++ b/Include/internal/pycore_warnings.h @@ -14,7 +14,7 @@ struct _warnings_runtime_state { PyObject *filters; /* List */ PyObject *once_registry; /* Dict */ PyObject *default_action; /* String */ - PyMutex mutex; + _PyRecursiveMutex lock; long filters_version; }; diff --git a/Include/opcode_ids.h b/Include/opcode_ids.h index 3cd189b93dd9d6..4a9fc15dcd2880 100644 --- a/Include/opcode_ids.h +++ b/Include/opcode_ids.h @@ -14,199 +14,205 @@ extern "C" { #define BINARY_SLICE 1 #define BINARY_SUBSCR 2 #define BINARY_OP_INPLACE_ADD_UNICODE 3 -#define CHECK_EG_MATCH 4 -#define CHECK_EXC_MATCH 5 -#define CLEANUP_THROW 6 -#define DELETE_SUBSCR 7 -#define END_ASYNC_FOR 8 -#define END_FOR 9 -#define END_SEND 10 -#define EXIT_INIT_CHECK 11 -#define FORMAT_SIMPLE 12 -#define FORMAT_WITH_SPEC 13 -#define GET_AITER 14 -#define GET_ANEXT 15 -#define GET_ITER 16 +#define CALL_FUNCTION_EX 4 +#define CHECK_EG_MATCH 5 +#define CHECK_EXC_MATCH 6 +#define CLEANUP_THROW 7 +#define DELETE_SUBSCR 8 +#define END_ASYNC_FOR 9 +#define END_FOR 10 +#define END_SEND 11 +#define EXIT_INIT_CHECK 12 +#define FORMAT_SIMPLE 13 +#define FORMAT_WITH_SPEC 14 +#define GET_AITER 15 +#define GET_ANEXT 16 #define RESERVED 17 -#define GET_LEN 18 -#define GET_YIELD_FROM_ITER 19 -#define INTERPRETER_EXIT 20 -#define LOAD_BUILD_CLASS 21 -#define LOAD_LOCALS 22 -#define MAKE_FUNCTION 23 -#define MATCH_KEYS 24 -#define MATCH_MAPPING 25 -#define MATCH_SEQUENCE 26 -#define NOP 27 -#define NOT_TAKEN 28 -#define POP_EXCEPT 29 -#define POP_TOP 30 -#define PUSH_EXC_INFO 31 -#define PUSH_NULL 32 -#define RETURN_GENERATOR 33 -#define RETURN_VALUE 34 -#define SETUP_ANNOTATIONS 35 -#define STORE_SLICE 36 -#define STORE_SUBSCR 37 -#define TO_BOOL 38 -#define UNARY_INVERT 39 -#define UNARY_NEGATIVE 40 -#define UNARY_NOT 41 -#define WITH_EXCEPT_START 42 -#define BINARY_OP 43 -#define 
BUILD_LIST 44 -#define BUILD_MAP 45 -#define BUILD_SET 46 -#define BUILD_SLICE 47 -#define BUILD_STRING 48 -#define BUILD_TUPLE 49 -#define CALL 50 -#define CALL_FUNCTION_EX 51 -#define CALL_INTRINSIC_1 52 -#define CALL_INTRINSIC_2 53 -#define CALL_KW 54 -#define COMPARE_OP 55 -#define CONTAINS_OP 56 -#define CONVERT_VALUE 57 -#define COPY 58 -#define COPY_FREE_VARS 59 -#define DELETE_ATTR 60 -#define DELETE_DEREF 61 -#define DELETE_FAST 62 -#define DELETE_GLOBAL 63 -#define DELETE_NAME 64 -#define DICT_MERGE 65 -#define DICT_UPDATE 66 -#define EXTENDED_ARG 67 -#define FOR_ITER 68 -#define GET_AWAITABLE 69 -#define IMPORT_FROM 70 -#define IMPORT_NAME 71 -#define IS_OP 72 -#define JUMP_BACKWARD 73 -#define JUMP_BACKWARD_NO_INTERRUPT 74 -#define JUMP_FORWARD 75 -#define LIST_APPEND 76 -#define LIST_EXTEND 77 -#define LOAD_ATTR 78 -#define LOAD_COMMON_CONSTANT 79 -#define LOAD_CONST 80 -#define LOAD_DEREF 81 -#define LOAD_FAST 82 -#define LOAD_FAST_AND_CLEAR 83 -#define LOAD_FAST_CHECK 84 -#define LOAD_FAST_LOAD_FAST 85 -#define LOAD_FROM_DICT_OR_DEREF 86 -#define LOAD_FROM_DICT_OR_GLOBALS 87 -#define LOAD_GLOBAL 88 -#define LOAD_NAME 89 -#define LOAD_SMALL_INT 90 -#define LOAD_SPECIAL 91 -#define LOAD_SUPER_ATTR 92 -#define MAKE_CELL 93 -#define MAP_ADD 94 -#define MATCH_CLASS 95 -#define POP_JUMP_IF_FALSE 96 -#define POP_JUMP_IF_NONE 97 -#define POP_JUMP_IF_NOT_NONE 98 -#define POP_JUMP_IF_TRUE 99 -#define RAISE_VARARGS 100 -#define RERAISE 101 -#define SEND 102 -#define SET_ADD 103 -#define SET_FUNCTION_ATTRIBUTE 104 -#define SET_UPDATE 105 -#define STORE_ATTR 106 -#define STORE_DEREF 107 -#define STORE_FAST 108 -#define STORE_FAST_LOAD_FAST 109 -#define STORE_FAST_STORE_FAST 110 -#define STORE_GLOBAL 111 -#define STORE_NAME 112 -#define SWAP 113 -#define UNPACK_EX 114 -#define UNPACK_SEQUENCE 115 -#define YIELD_VALUE 116 +#define GET_ITER 18 +#define GET_LEN 19 +#define GET_YIELD_FROM_ITER 20 +#define INTERPRETER_EXIT 21 +#define LOAD_BUILD_CLASS 22 +#define LOAD_LOCALS 23 +#define MAKE_FUNCTION 24 +#define MATCH_KEYS 25 +#define MATCH_MAPPING 26 +#define MATCH_SEQUENCE 27 +#define NOP 28 +#define NOT_TAKEN 29 +#define POP_EXCEPT 30 +#define POP_ITER 31 +#define POP_TOP 32 +#define PUSH_EXC_INFO 33 +#define PUSH_NULL 34 +#define RETURN_GENERATOR 35 +#define RETURN_VALUE 36 +#define SETUP_ANNOTATIONS 37 +#define STORE_SLICE 38 +#define STORE_SUBSCR 39 +#define TO_BOOL 40 +#define UNARY_INVERT 41 +#define UNARY_NEGATIVE 42 +#define UNARY_NOT 43 +#define WITH_EXCEPT_START 44 +#define BINARY_OP 45 +#define BUILD_LIST 46 +#define BUILD_MAP 47 +#define BUILD_SET 48 +#define BUILD_SLICE 49 +#define BUILD_STRING 50 +#define BUILD_TUPLE 51 +#define CALL 52 +#define CALL_INTRINSIC_1 53 +#define CALL_INTRINSIC_2 54 +#define CALL_KW 55 +#define COMPARE_OP 56 +#define CONTAINS_OP 57 +#define CONVERT_VALUE 58 +#define COPY 59 +#define COPY_FREE_VARS 60 +#define DELETE_ATTR 61 +#define DELETE_DEREF 62 +#define DELETE_FAST 63 +#define DELETE_GLOBAL 64 +#define DELETE_NAME 65 +#define DICT_MERGE 66 +#define DICT_UPDATE 67 +#define EXTENDED_ARG 68 +#define FOR_ITER 69 +#define GET_AWAITABLE 70 +#define IMPORT_FROM 71 +#define IMPORT_NAME 72 +#define IS_OP 73 +#define JUMP_BACKWARD 74 +#define JUMP_BACKWARD_NO_INTERRUPT 75 +#define JUMP_FORWARD 76 +#define LIST_APPEND 77 +#define LIST_EXTEND 78 +#define LOAD_ATTR 79 +#define LOAD_COMMON_CONSTANT 80 +#define LOAD_CONST 81 +#define LOAD_DEREF 82 +#define LOAD_FAST 83 +#define LOAD_FAST_AND_CLEAR 84 +#define LOAD_FAST_CHECK 85 +#define LOAD_FAST_LOAD_FAST 86 
+#define LOAD_FROM_DICT_OR_DEREF 87 +#define LOAD_FROM_DICT_OR_GLOBALS 88 +#define LOAD_GLOBAL 89 +#define LOAD_NAME 90 +#define LOAD_SMALL_INT 91 +#define LOAD_SPECIAL 92 +#define LOAD_SUPER_ATTR 93 +#define MAKE_CELL 94 +#define MAP_ADD 95 +#define MATCH_CLASS 96 +#define POP_JUMP_IF_FALSE 97 +#define POP_JUMP_IF_NONE 98 +#define POP_JUMP_IF_NOT_NONE 99 +#define POP_JUMP_IF_TRUE 100 +#define RAISE_VARARGS 101 +#define RERAISE 102 +#define SEND 103 +#define SET_ADD 104 +#define SET_FUNCTION_ATTRIBUTE 105 +#define SET_UPDATE 106 +#define STORE_ATTR 107 +#define STORE_DEREF 108 +#define STORE_FAST 109 +#define STORE_FAST_LOAD_FAST 110 +#define STORE_FAST_STORE_FAST 111 +#define STORE_GLOBAL 112 +#define STORE_NAME 113 +#define SWAP 114 +#define UNPACK_EX 115 +#define UNPACK_SEQUENCE 116 +#define YIELD_VALUE 117 #define RESUME 149 #define BINARY_OP_ADD_FLOAT 150 #define BINARY_OP_ADD_INT 151 #define BINARY_OP_ADD_UNICODE 152 -#define BINARY_OP_MULTIPLY_FLOAT 153 -#define BINARY_OP_MULTIPLY_INT 154 -#define BINARY_OP_SUBTRACT_FLOAT 155 -#define BINARY_OP_SUBTRACT_INT 156 -#define BINARY_SUBSCR_DICT 157 -#define BINARY_SUBSCR_GETITEM 158 -#define BINARY_SUBSCR_LIST_INT 159 -#define BINARY_SUBSCR_STR_INT 160 -#define BINARY_SUBSCR_TUPLE_INT 161 -#define CALL_ALLOC_AND_ENTER_INIT 162 -#define CALL_BOUND_METHOD_EXACT_ARGS 163 -#define CALL_BOUND_METHOD_GENERAL 164 -#define CALL_BUILTIN_CLASS 165 -#define CALL_BUILTIN_FAST 166 -#define CALL_BUILTIN_FAST_WITH_KEYWORDS 167 -#define CALL_BUILTIN_O 168 -#define CALL_ISINSTANCE 169 -#define CALL_KW_BOUND_METHOD 170 -#define CALL_KW_NON_PY 171 -#define CALL_KW_PY 172 -#define CALL_LEN 173 -#define CALL_LIST_APPEND 174 -#define CALL_METHOD_DESCRIPTOR_FAST 175 -#define CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS 176 -#define CALL_METHOD_DESCRIPTOR_NOARGS 177 -#define CALL_METHOD_DESCRIPTOR_O 178 -#define CALL_NON_PY_GENERAL 179 -#define CALL_PY_EXACT_ARGS 180 -#define CALL_PY_GENERAL 181 -#define CALL_STR_1 182 -#define CALL_TUPLE_1 183 -#define CALL_TYPE_1 184 -#define COMPARE_OP_FLOAT 185 -#define COMPARE_OP_INT 186 -#define COMPARE_OP_STR 187 -#define CONTAINS_OP_DICT 188 -#define CONTAINS_OP_SET 189 -#define FOR_ITER_GEN 190 -#define FOR_ITER_LIST 191 -#define FOR_ITER_RANGE 192 -#define FOR_ITER_TUPLE 193 -#define LOAD_ATTR_CLASS 194 -#define LOAD_ATTR_CLASS_WITH_METACLASS_CHECK 195 -#define LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN 196 -#define LOAD_ATTR_INSTANCE_VALUE 197 -#define LOAD_ATTR_METHOD_LAZY_DICT 198 -#define LOAD_ATTR_METHOD_NO_DICT 199 -#define LOAD_ATTR_METHOD_WITH_VALUES 200 -#define LOAD_ATTR_MODULE 201 -#define LOAD_ATTR_NONDESCRIPTOR_NO_DICT 202 -#define LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES 203 -#define LOAD_ATTR_PROPERTY 204 -#define LOAD_ATTR_SLOT 205 -#define LOAD_ATTR_WITH_HINT 206 -#define LOAD_CONST_IMMORTAL 207 -#define LOAD_GLOBAL_BUILTIN 208 -#define LOAD_GLOBAL_MODULE 209 -#define LOAD_SUPER_ATTR_ATTR 210 -#define LOAD_SUPER_ATTR_METHOD 211 -#define RESUME_CHECK 212 -#define SEND_GEN 213 -#define STORE_ATTR_INSTANCE_VALUE 214 -#define STORE_ATTR_SLOT 215 -#define STORE_ATTR_WITH_HINT 216 -#define STORE_SUBSCR_DICT 217 -#define STORE_SUBSCR_LIST_INT 218 -#define TO_BOOL_ALWAYS_TRUE 219 -#define TO_BOOL_BOOL 220 -#define TO_BOOL_INT 221 -#define TO_BOOL_LIST 222 -#define TO_BOOL_NONE 223 -#define TO_BOOL_STR 224 -#define UNPACK_SEQUENCE_LIST 225 -#define UNPACK_SEQUENCE_TUPLE 226 -#define UNPACK_SEQUENCE_TWO_TUPLE 227 -#define INSTRUMENTED_END_FOR 236 +#define BINARY_OP_EXTEND 153 +#define BINARY_OP_MULTIPLY_FLOAT 154 +#define 
BINARY_OP_MULTIPLY_INT 155 +#define BINARY_OP_SUBTRACT_FLOAT 156 +#define BINARY_OP_SUBTRACT_INT 157 +#define BINARY_SUBSCR_DICT 158 +#define BINARY_SUBSCR_GETITEM 159 +#define BINARY_SUBSCR_LIST_INT 160 +#define BINARY_SUBSCR_STR_INT 161 +#define BINARY_SUBSCR_TUPLE_INT 162 +#define CALL_ALLOC_AND_ENTER_INIT 163 +#define CALL_BOUND_METHOD_EXACT_ARGS 164 +#define CALL_BOUND_METHOD_GENERAL 165 +#define CALL_BUILTIN_CLASS 166 +#define CALL_BUILTIN_FAST 167 +#define CALL_BUILTIN_FAST_WITH_KEYWORDS 168 +#define CALL_BUILTIN_O 169 +#define CALL_ISINSTANCE 170 +#define CALL_KW_BOUND_METHOD 171 +#define CALL_KW_NON_PY 172 +#define CALL_KW_PY 173 +#define CALL_LEN 174 +#define CALL_LIST_APPEND 175 +#define CALL_METHOD_DESCRIPTOR_FAST 176 +#define CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS 177 +#define CALL_METHOD_DESCRIPTOR_NOARGS 178 +#define CALL_METHOD_DESCRIPTOR_O 179 +#define CALL_NON_PY_GENERAL 180 +#define CALL_PY_EXACT_ARGS 181 +#define CALL_PY_GENERAL 182 +#define CALL_STR_1 183 +#define CALL_TUPLE_1 184 +#define CALL_TYPE_1 185 +#define COMPARE_OP_FLOAT 186 +#define COMPARE_OP_INT 187 +#define COMPARE_OP_STR 188 +#define CONTAINS_OP_DICT 189 +#define CONTAINS_OP_SET 190 +#define FOR_ITER_GEN 191 +#define FOR_ITER_LIST 192 +#define FOR_ITER_RANGE 193 +#define FOR_ITER_TUPLE 194 +#define JUMP_BACKWARD_JIT 195 +#define JUMP_BACKWARD_NO_JIT 196 +#define LOAD_ATTR_CLASS 197 +#define LOAD_ATTR_CLASS_WITH_METACLASS_CHECK 198 +#define LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN 199 +#define LOAD_ATTR_INSTANCE_VALUE 200 +#define LOAD_ATTR_METHOD_LAZY_DICT 201 +#define LOAD_ATTR_METHOD_NO_DICT 202 +#define LOAD_ATTR_METHOD_WITH_VALUES 203 +#define LOAD_ATTR_MODULE 204 +#define LOAD_ATTR_NONDESCRIPTOR_NO_DICT 205 +#define LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES 206 +#define LOAD_ATTR_PROPERTY 207 +#define LOAD_ATTR_SLOT 208 +#define LOAD_ATTR_WITH_HINT 209 +#define LOAD_CONST_IMMORTAL 210 +#define LOAD_CONST_MORTAL 211 +#define LOAD_GLOBAL_BUILTIN 212 +#define LOAD_GLOBAL_MODULE 213 +#define LOAD_SUPER_ATTR_ATTR 214 +#define LOAD_SUPER_ATTR_METHOD 215 +#define RESUME_CHECK 216 +#define SEND_GEN 217 +#define STORE_ATTR_INSTANCE_VALUE 218 +#define STORE_ATTR_SLOT 219 +#define STORE_ATTR_WITH_HINT 220 +#define STORE_SUBSCR_DICT 221 +#define STORE_SUBSCR_LIST_INT 222 +#define TO_BOOL_ALWAYS_TRUE 223 +#define TO_BOOL_BOOL 224 +#define TO_BOOL_INT 225 +#define TO_BOOL_LIST 226 +#define TO_BOOL_NONE 227 +#define TO_BOOL_STR 228 +#define UNPACK_SEQUENCE_LIST 229 +#define UNPACK_SEQUENCE_TUPLE 230 +#define UNPACK_SEQUENCE_TWO_TUPLE 231 +#define INSTRUMENTED_END_FOR 235 +#define INSTRUMENTED_POP_ITER 236 #define INSTRUMENTED_END_SEND 237 #define INSTRUMENTED_LOAD_SUPER_ATTR 238 #define INSTRUMENTED_FOR_ITER 239 @@ -237,9 +243,9 @@ extern "C" { #define SETUP_WITH 264 #define STORE_FAST_MAYBE_NULL 265 -#define HAVE_ARGUMENT 42 +#define HAVE_ARGUMENT 44 #define MIN_SPECIALIZED_OPCODE 150 -#define MIN_INSTRUMENTED_OPCODE 236 +#define MIN_INSTRUMENTED_OPCODE 235 #ifdef __cplusplus } diff --git a/Include/patchlevel.h b/Include/patchlevel.h index 6d4f719fcde5a8..40e7d73b7a6634 100644 --- a/Include/patchlevel.h +++ b/Include/patchlevel.h @@ -1,4 +1,5 @@ - +#ifndef _Py_PATCHLEVEL_H +#define _Py_PATCHLEVEL_H /* Python version identification scheme. 
When the major or minor version changes, the VERSION variable in @@ -20,16 +21,29 @@ #define PY_MINOR_VERSION 14 #define PY_MICRO_VERSION 0 #define PY_RELEASE_LEVEL PY_RELEASE_LEVEL_ALPHA -#define PY_RELEASE_SERIAL 3 +#define PY_RELEASE_SERIAL 4 /* Version as a string */ -#define PY_VERSION "3.14.0a3+" +#define PY_VERSION "3.14.0a4+" /*--end constants--*/ + +#define _Py_PACK_FULL_VERSION(X, Y, Z, LEVEL, SERIAL) ( \ + (((X) & 0xff) << 24) | \ + (((Y) & 0xff) << 16) | \ + (((Z) & 0xff) << 8) | \ + (((LEVEL) & 0xf) << 4) | \ + (((SERIAL) & 0xf) << 0)) + /* Version as a single 4-byte hex number, e.g. 0x010502B2 == 1.5.2b2. Use this for numeric comparisons, e.g. #if PY_VERSION_HEX >= ... */ -#define PY_VERSION_HEX ((PY_MAJOR_VERSION << 24) | \ - (PY_MINOR_VERSION << 16) | \ - (PY_MICRO_VERSION << 8) | \ - (PY_RELEASE_LEVEL << 4) | \ - (PY_RELEASE_SERIAL << 0)) +#define PY_VERSION_HEX _Py_PACK_FULL_VERSION( \ + PY_MAJOR_VERSION, \ + PY_MINOR_VERSION, \ + PY_MICRO_VERSION, \ + PY_RELEASE_LEVEL, \ + PY_RELEASE_SERIAL) + +// Public Py_PACK_VERSION is declared in pymacro.h; it needs . + +#endif //_Py_PATCHLEVEL_H diff --git a/Include/pymacro.h b/Include/pymacro.h index e0378f9d27a048..a82f347866e8d0 100644 --- a/Include/pymacro.h +++ b/Include/pymacro.h @@ -190,4 +190,13 @@ // "comparison of unsigned expression in '< 0' is always false". #define _Py_IS_TYPE_SIGNED(type) ((type)(-1) <= 0) +#if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x030E0000 // 3.14 +// Version helpers. These are primarily macros, but have exported equivalents. +PyAPI_FUNC(uint32_t) Py_PACK_FULL_VERSION(int x, int y, int z, int level, int serial); +PyAPI_FUNC(uint32_t) Py_PACK_VERSION(int x, int y); +#define Py_PACK_FULL_VERSION _Py_PACK_FULL_VERSION +#define Py_PACK_VERSION(X, Y) Py_PACK_FULL_VERSION(X, Y, 0, 0, 0) +#endif // Py_LIMITED_API < 3.14 + + #endif /* Py_PYMACRO_H */ diff --git a/Include/pyport.h b/Include/pyport.h index 2b6bd4c21110e5..aabd094df54a74 100644 --- a/Include/pyport.h +++ b/Include/pyport.h @@ -625,8 +625,13 @@ extern "C" { // case 2: code; break; // } // -// __attribute__((fallthrough)) was introduced in GCC 7. -#if _Py__has_attribute(fallthrough) +// __attribute__((fallthrough)) was introduced in GCC 7 and Clang 10 / +// Apple Clang 12.0. Earlier Clang versions support only the C++11 +// style fallthrough attribute, not the GCC extension syntax used here, +// and __has_attribute(fallthrough) evaluates to 1. +#if _Py__has_attribute(fallthrough) && (!defined(__clang__) || \ + (!defined(__apple_build_version__) && __clang_major__ >= 10) || \ + (defined(__apple_build_version__) && __clang_major__ >= 12)) # define _Py_FALLTHROUGH __attribute__((fallthrough)) #else # define _Py_FALLTHROUGH do { } while (0) diff --git a/InternalDocs/README.md b/InternalDocs/README.md index 794b4f3c6aad42..4502902307cd5c 100644 --- a/InternalDocs/README.md +++ b/InternalDocs/README.md @@ -25,7 +25,7 @@ Runtime Objects - [Code Objects](code_objects.md) -- [Generators (coming soon)](generators.md) +- [Generators](generators.md) - [Frames](frames.md) diff --git a/InternalDocs/compiler.md b/InternalDocs/compiler.md index c257bfd9faf78f..8ca19a42b91b83 100644 --- a/InternalDocs/compiler.md +++ b/InternalDocs/compiler.md @@ -401,7 +401,7 @@ Emission of bytecode is handled by the following macros: add the `LOAD_CONST` opcode with the proper argument based on the position of the specified PyObject in the consts table. 
* `ADDOP_LOAD_CONST_NEW(struct compiler *, location, PyObject *)`: - just like `ADDOP_LOAD_CONST_NEW`, but steals a reference to PyObject + just like `ADDOP_LOAD_CONST`, but steals a reference to PyObject * `ADDOP_JUMP(struct compiler *, location, int, basicblock *)`: create a jump to a basic block diff --git a/InternalDocs/generators.md b/InternalDocs/generators.md index afa8b8f4bb8040..87fbb91236844b 100644 --- a/InternalDocs/generators.md +++ b/InternalDocs/generators.md @@ -1,8 +1,106 @@ + +Generators and Coroutines +========================= + Generators -========== +---------- + +Generators in CPython are implemented with the struct `PyGenObject`. +They consist of a [`frame`](frames.md) and metadata about the generator's +execution state. + +A generator object resumes execution in its frame when its `send()` +method is called. This is analogous to a function executing in its own +frame when it is called, but a function returns to the calling frame only once, +while a generator "returns" execution to the caller's frame every time +it emits a new item with a +[`yield` expression](https://docs.python.org/dev/reference/expressions.html#yield-expressions). +This is implemented by the +[`YIELD_VALUE`](https://docs.python.org/dev/library/dis.html#opcode-YIELD_VALUE) +bytecode, which is similar to +[`RETURN_VALUE`](https://docs.python.org/dev/library/dis.html#opcode-RETURN_VALUE) +in the sense that it puts a value on the stack and returns execution to the +calling frame, but it also needs to perform additional work to leave the generator +frame in a state that allows it to be resumed. In particular, it updates the frame's +instruction pointer and stores the interpreter's exception state on the generator +object. When the generator is resumed, this exception state is copied back to the +interpreter state. + +The `frame` of a generator is embedded in the generator object struct as a +[`_PyInterpreterFrame`](frames.md) (see `_PyGenObject_HEAD` in +[`pycore_genobject.h`](../Include/internal/pycore_genobject.h)). +This means that we can get the frame from the generator or the generator +from the frame (see `_PyGen_GetGeneratorFromFrame` in the same file). +Other fields of the generator struct include metadata (such as the name of +the generator function) and runtime state information (such as whether its +frame is executing, suspended, cleared, etc.). + +Generator Object Creation and Destruction +----------------------------------------- + +The bytecode of a generator function begins with a +[`RETURN_GENERATOR`](https://docs.python.org/dev/library/dis.html#opcode-RETURN_GENERATOR) +instruction, which creates a generator object, including its embedded frame. +The generator's frame is initialized as a copy of the frame in which +`RETURN_GENERATOR` is executing, but its `owner` field is overwritten to indicate +that it is owned by a generator. Finally, `RETURN_GENERATOR` pushes the new generator +object to the stack and returns to the caller of the generator function (at +which time its frame is destroyed). When the generator is next resumed by +[`gen_send_ex2()`](../Objects/genobject.c), `_PyEval_EvalFrame()` is called +to continue executing the generator function, in the frame that is embedded in +the generator object. + +When a generator object is destroyed in [`gen_dealloc`](../Objects/genobject.c), +its embedded `_PyInterpreterFrame` field may need to be preserved, if it is exposed +to Python as part of a [`PyFrameObject`](frames.md#frame-objects). 
This is detected +in [`_PyFrame_ClearExceptCode`](../Python/frame.c) by the fact that the interpreter +frame's `frame_obj` field is set, and the frame object it points to has refcount +greater than 1. If so, the `take_ownership()` function is called to create a new +copy of the interpreter frame and transfer ownership of it from the generator to +the frame object. + +Iteration +--------- + +The [`FOR_ITER`](https://docs.python.org/dev/library/dis.html#opcode-FOR_ITER) +instruction calls `__next__` on the iterator which is on the top of the stack, +and pushes the result to the stack. It has [`specializations`](adaptive.md) +for a few common iterator types, including `FOR_ITER_GEN`, for iterating over +a generator. `FOR_ITER_GEN` bypasses the call to `__next__`, and instead +directly pushes the generator's frame onto the frame stack and resumes its execution from the +instruction that follows the last yield. + +Chained Generators +------------------ + +A `yield from` expression creates a generator that efficiently yields the +sequence created by another generator. This is implemented with the +[`SEND` instruction](https://docs.python.org/dev/library/dis.html#opcode-SEND), +which pushes the value of its arg to the stack of the generator's frame, sets +the exception state on this frame, and resumes execution of the chained generator. +On return from `SEND`, the value at the top of the stack is sent back up +the generator chain with a `YIELD_VALUE`. This sequence of `SEND` followed by +`YIELD_VALUE` is repeated in a loop, until a `StopIteration` exception is +raised to indicate that the generator has no more values to emit. + +The [`CLEANUP_THROW`](https://docs.python.org/dev/library/dis.html#opcode-CLEANUP_THROW) +instruction is used to handle exceptions raised from the send-yield loop. +Exceptions of type `StopIteration` are handled; their `value` field holds the +value to be returned by the generator's `close()` function. Any other +exception is re-raised by `CLEANUP_THROW`. + +Coroutines +---------- -Coming soon. +Coroutines are generators that use the value returned from a `yield` expression, +i.e., the argument that was passed to the `.send()` call that resumed it after +it yielded. This makes it possible for data to flow in both directions: from +the generator to the caller via the argument of the `yield` expression, and +from the caller to the generator via the send argument to the `send()` call. +A `yield from` expression passes the `send` argument to the chained generator, +so this data flow works along the chain (see `gen_send_ex2()` in +[`genobject.c`](../Objects/genobject.c)). - +Recall that a generator's `__next__` function simply calls `self.send(None)`, +so all this works the same in generators and coroutines, but only coroutines +use the value of the argument to `send`. diff --git a/InternalDocs/jit.md b/InternalDocs/jit.md index 1e9f385d5f87fa..2c204f39792d6a 100644 --- a/InternalDocs/jit.md +++ b/InternalDocs/jit.md @@ -38,12 +38,8 @@ executor in `co_executors`. ## The micro-op optimizer -The optimizer that `_PyOptimizer_Optimize()` runs is configurable via the -`_Py_SetTier2Optimizer()` function (this is used in test via -`_testinternalcapi.set_optimizer()`.) - The micro-op (abbreviated `uop` to approximate `μop`) optimizer is defined in -[`Python/optimizer.c`](../Python/optimizer.c) as the type `_PyUOpOptimizer_Type`. +[`Python/optimizer.c`](../Python/optimizer.c) as `_PyOptimizer_Optimize`.
It translates an instruction trace into a sequence of micro-ops by replacing each bytecode by an equivalent sequence of micro-ops (see `_PyOpcode_macro_expansion` in diff --git a/Lib/_colorize.py b/Lib/_colorize.py index f609901887a26b..41e818f2a747ff 100644 --- a/Lib/_colorize.py +++ b/Lib/_colorize.py @@ -26,30 +26,32 @@ class ANSIColors: setattr(NoColors, attr, "") -def get_colors(colorize: bool = False) -> ANSIColors: - if colorize or can_colorize(): +def get_colors(colorize: bool = False, *, file=None) -> ANSIColors: + if colorize or can_colorize(file=file): return ANSIColors() else: return NoColors -def can_colorize() -> bool: +def can_colorize(*, file=None) -> bool: + if file is None: + file = sys.stdout + if not sys.flags.ignore_environment: if os.environ.get("PYTHON_COLORS") == "0": return False if os.environ.get("PYTHON_COLORS") == "1": return True - if "NO_COLOR" in os.environ: - return False + if os.environ.get("NO_COLOR"): + return False if not COLORIZE: return False - if not sys.flags.ignore_environment: - if "FORCE_COLOR" in os.environ: - return True - if os.environ.get("TERM") == "dumb": - return False + if os.environ.get("FORCE_COLOR"): + return True + if os.environ.get("TERM") == "dumb": + return False - if not hasattr(sys.stderr, "fileno"): + if not hasattr(file, "fileno"): return False if sys.platform == "win32": @@ -62,6 +64,6 @@ def can_colorize() -> bool: return False try: - return os.isatty(sys.stderr.fileno()) + return os.isatty(file.fileno()) except io.UnsupportedOperation: - return sys.stderr.isatty() + return file.isatty() diff --git a/Lib/_opcode_metadata.py b/Lib/_opcode_metadata.py index dada2cb5fa033f..12c41374592185 100644 --- a/Lib/_opcode_metadata.py +++ b/Lib/_opcode_metadata.py @@ -7,6 +7,7 @@ "RESUME_CHECK", ], "LOAD_CONST": [ + "LOAD_CONST_MORTAL", "LOAD_CONST_IMMORTAL", ], "TO_BOOL": [ @@ -25,6 +26,7 @@ "BINARY_OP_ADD_FLOAT", "BINARY_OP_SUBTRACT_FLOAT", "BINARY_OP_ADD_UNICODE", + "BINARY_OP_EXTEND", "BINARY_OP_INPLACE_ADD_UNICODE", ], "BINARY_SUBSCR": [ @@ -83,6 +85,10 @@ "CONTAINS_OP_SET", "CONTAINS_OP_DICT", ], + "JUMP_BACKWARD": [ + "JUMP_BACKWARD_NO_JIT", + "JUMP_BACKWARD_JIT", + ], "FOR_ITER": [ "FOR_ITER_LIST", "FOR_ITER_TUPLE", @@ -122,82 +128,86 @@ 'BINARY_OP_ADD_FLOAT': 150, 'BINARY_OP_ADD_INT': 151, 'BINARY_OP_ADD_UNICODE': 152, + 'BINARY_OP_EXTEND': 153, 'BINARY_OP_INPLACE_ADD_UNICODE': 3, - 'BINARY_OP_MULTIPLY_FLOAT': 153, - 'BINARY_OP_MULTIPLY_INT': 154, - 'BINARY_OP_SUBTRACT_FLOAT': 155, - 'BINARY_OP_SUBTRACT_INT': 156, - 'BINARY_SUBSCR_DICT': 157, - 'BINARY_SUBSCR_GETITEM': 158, - 'BINARY_SUBSCR_LIST_INT': 159, - 'BINARY_SUBSCR_STR_INT': 160, - 'BINARY_SUBSCR_TUPLE_INT': 161, - 'CALL_ALLOC_AND_ENTER_INIT': 162, - 'CALL_BOUND_METHOD_EXACT_ARGS': 163, - 'CALL_BOUND_METHOD_GENERAL': 164, - 'CALL_BUILTIN_CLASS': 165, - 'CALL_BUILTIN_FAST': 166, - 'CALL_BUILTIN_FAST_WITH_KEYWORDS': 167, - 'CALL_BUILTIN_O': 168, - 'CALL_ISINSTANCE': 169, - 'CALL_KW_BOUND_METHOD': 170, - 'CALL_KW_NON_PY': 171, - 'CALL_KW_PY': 172, - 'CALL_LEN': 173, - 'CALL_LIST_APPEND': 174, - 'CALL_METHOD_DESCRIPTOR_FAST': 175, - 'CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS': 176, - 'CALL_METHOD_DESCRIPTOR_NOARGS': 177, - 'CALL_METHOD_DESCRIPTOR_O': 178, - 'CALL_NON_PY_GENERAL': 179, - 'CALL_PY_EXACT_ARGS': 180, - 'CALL_PY_GENERAL': 181, - 'CALL_STR_1': 182, - 'CALL_TUPLE_1': 183, - 'CALL_TYPE_1': 184, - 'COMPARE_OP_FLOAT': 185, - 'COMPARE_OP_INT': 186, - 'COMPARE_OP_STR': 187, - 'CONTAINS_OP_DICT': 188, - 'CONTAINS_OP_SET': 189, - 'FOR_ITER_GEN': 190, - 'FOR_ITER_LIST': 191, 
- 'FOR_ITER_RANGE': 192, - 'FOR_ITER_TUPLE': 193, - 'LOAD_ATTR_CLASS': 194, - 'LOAD_ATTR_CLASS_WITH_METACLASS_CHECK': 195, - 'LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN': 196, - 'LOAD_ATTR_INSTANCE_VALUE': 197, - 'LOAD_ATTR_METHOD_LAZY_DICT': 198, - 'LOAD_ATTR_METHOD_NO_DICT': 199, - 'LOAD_ATTR_METHOD_WITH_VALUES': 200, - 'LOAD_ATTR_MODULE': 201, - 'LOAD_ATTR_NONDESCRIPTOR_NO_DICT': 202, - 'LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES': 203, - 'LOAD_ATTR_PROPERTY': 204, - 'LOAD_ATTR_SLOT': 205, - 'LOAD_ATTR_WITH_HINT': 206, - 'LOAD_CONST_IMMORTAL': 207, - 'LOAD_GLOBAL_BUILTIN': 208, - 'LOAD_GLOBAL_MODULE': 209, - 'LOAD_SUPER_ATTR_ATTR': 210, - 'LOAD_SUPER_ATTR_METHOD': 211, - 'RESUME_CHECK': 212, - 'SEND_GEN': 213, - 'STORE_ATTR_INSTANCE_VALUE': 214, - 'STORE_ATTR_SLOT': 215, - 'STORE_ATTR_WITH_HINT': 216, - 'STORE_SUBSCR_DICT': 217, - 'STORE_SUBSCR_LIST_INT': 218, - 'TO_BOOL_ALWAYS_TRUE': 219, - 'TO_BOOL_BOOL': 220, - 'TO_BOOL_INT': 221, - 'TO_BOOL_LIST': 222, - 'TO_BOOL_NONE': 223, - 'TO_BOOL_STR': 224, - 'UNPACK_SEQUENCE_LIST': 225, - 'UNPACK_SEQUENCE_TUPLE': 226, - 'UNPACK_SEQUENCE_TWO_TUPLE': 227, + 'BINARY_OP_MULTIPLY_FLOAT': 154, + 'BINARY_OP_MULTIPLY_INT': 155, + 'BINARY_OP_SUBTRACT_FLOAT': 156, + 'BINARY_OP_SUBTRACT_INT': 157, + 'BINARY_SUBSCR_DICT': 158, + 'BINARY_SUBSCR_GETITEM': 159, + 'BINARY_SUBSCR_LIST_INT': 160, + 'BINARY_SUBSCR_STR_INT': 161, + 'BINARY_SUBSCR_TUPLE_INT': 162, + 'CALL_ALLOC_AND_ENTER_INIT': 163, + 'CALL_BOUND_METHOD_EXACT_ARGS': 164, + 'CALL_BOUND_METHOD_GENERAL': 165, + 'CALL_BUILTIN_CLASS': 166, + 'CALL_BUILTIN_FAST': 167, + 'CALL_BUILTIN_FAST_WITH_KEYWORDS': 168, + 'CALL_BUILTIN_O': 169, + 'CALL_ISINSTANCE': 170, + 'CALL_KW_BOUND_METHOD': 171, + 'CALL_KW_NON_PY': 172, + 'CALL_KW_PY': 173, + 'CALL_LEN': 174, + 'CALL_LIST_APPEND': 175, + 'CALL_METHOD_DESCRIPTOR_FAST': 176, + 'CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS': 177, + 'CALL_METHOD_DESCRIPTOR_NOARGS': 178, + 'CALL_METHOD_DESCRIPTOR_O': 179, + 'CALL_NON_PY_GENERAL': 180, + 'CALL_PY_EXACT_ARGS': 181, + 'CALL_PY_GENERAL': 182, + 'CALL_STR_1': 183, + 'CALL_TUPLE_1': 184, + 'CALL_TYPE_1': 185, + 'COMPARE_OP_FLOAT': 186, + 'COMPARE_OP_INT': 187, + 'COMPARE_OP_STR': 188, + 'CONTAINS_OP_DICT': 189, + 'CONTAINS_OP_SET': 190, + 'FOR_ITER_GEN': 191, + 'FOR_ITER_LIST': 192, + 'FOR_ITER_RANGE': 193, + 'FOR_ITER_TUPLE': 194, + 'JUMP_BACKWARD_JIT': 195, + 'JUMP_BACKWARD_NO_JIT': 196, + 'LOAD_ATTR_CLASS': 197, + 'LOAD_ATTR_CLASS_WITH_METACLASS_CHECK': 198, + 'LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN': 199, + 'LOAD_ATTR_INSTANCE_VALUE': 200, + 'LOAD_ATTR_METHOD_LAZY_DICT': 201, + 'LOAD_ATTR_METHOD_NO_DICT': 202, + 'LOAD_ATTR_METHOD_WITH_VALUES': 203, + 'LOAD_ATTR_MODULE': 204, + 'LOAD_ATTR_NONDESCRIPTOR_NO_DICT': 205, + 'LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES': 206, + 'LOAD_ATTR_PROPERTY': 207, + 'LOAD_ATTR_SLOT': 208, + 'LOAD_ATTR_WITH_HINT': 209, + 'LOAD_CONST_IMMORTAL': 210, + 'LOAD_CONST_MORTAL': 211, + 'LOAD_GLOBAL_BUILTIN': 212, + 'LOAD_GLOBAL_MODULE': 213, + 'LOAD_SUPER_ATTR_ATTR': 214, + 'LOAD_SUPER_ATTR_METHOD': 215, + 'RESUME_CHECK': 216, + 'SEND_GEN': 217, + 'STORE_ATTR_INSTANCE_VALUE': 218, + 'STORE_ATTR_SLOT': 219, + 'STORE_ATTR_WITH_HINT': 220, + 'STORE_SUBSCR_DICT': 221, + 'STORE_SUBSCR_LIST_INT': 222, + 'TO_BOOL_ALWAYS_TRUE': 223, + 'TO_BOOL_BOOL': 224, + 'TO_BOOL_INT': 225, + 'TO_BOOL_LIST': 226, + 'TO_BOOL_NONE': 227, + 'TO_BOOL_STR': 228, + 'UNPACK_SEQUENCE_LIST': 229, + 'UNPACK_SEQUENCE_TUPLE': 230, + 'UNPACK_SEQUENCE_TWO_TUPLE': 231, } opmap = { @@ -208,119 +218,121 @@ 'ENTER_EXECUTOR': 255, 'BINARY_SLICE': 1, 
'BINARY_SUBSCR': 2, - 'CHECK_EG_MATCH': 4, - 'CHECK_EXC_MATCH': 5, - 'CLEANUP_THROW': 6, - 'DELETE_SUBSCR': 7, - 'END_ASYNC_FOR': 8, - 'END_FOR': 9, - 'END_SEND': 10, - 'EXIT_INIT_CHECK': 11, - 'FORMAT_SIMPLE': 12, - 'FORMAT_WITH_SPEC': 13, - 'GET_AITER': 14, - 'GET_ANEXT': 15, - 'GET_ITER': 16, - 'GET_LEN': 18, - 'GET_YIELD_FROM_ITER': 19, - 'INTERPRETER_EXIT': 20, - 'LOAD_BUILD_CLASS': 21, - 'LOAD_LOCALS': 22, - 'MAKE_FUNCTION': 23, - 'MATCH_KEYS': 24, - 'MATCH_MAPPING': 25, - 'MATCH_SEQUENCE': 26, - 'NOP': 27, - 'NOT_TAKEN': 28, - 'POP_EXCEPT': 29, - 'POP_TOP': 30, - 'PUSH_EXC_INFO': 31, - 'PUSH_NULL': 32, - 'RETURN_GENERATOR': 33, - 'RETURN_VALUE': 34, - 'SETUP_ANNOTATIONS': 35, - 'STORE_SLICE': 36, - 'STORE_SUBSCR': 37, - 'TO_BOOL': 38, - 'UNARY_INVERT': 39, - 'UNARY_NEGATIVE': 40, - 'UNARY_NOT': 41, - 'WITH_EXCEPT_START': 42, - 'BINARY_OP': 43, - 'BUILD_LIST': 44, - 'BUILD_MAP': 45, - 'BUILD_SET': 46, - 'BUILD_SLICE': 47, - 'BUILD_STRING': 48, - 'BUILD_TUPLE': 49, - 'CALL': 50, - 'CALL_FUNCTION_EX': 51, - 'CALL_INTRINSIC_1': 52, - 'CALL_INTRINSIC_2': 53, - 'CALL_KW': 54, - 'COMPARE_OP': 55, - 'CONTAINS_OP': 56, - 'CONVERT_VALUE': 57, - 'COPY': 58, - 'COPY_FREE_VARS': 59, - 'DELETE_ATTR': 60, - 'DELETE_DEREF': 61, - 'DELETE_FAST': 62, - 'DELETE_GLOBAL': 63, - 'DELETE_NAME': 64, - 'DICT_MERGE': 65, - 'DICT_UPDATE': 66, - 'EXTENDED_ARG': 67, - 'FOR_ITER': 68, - 'GET_AWAITABLE': 69, - 'IMPORT_FROM': 70, - 'IMPORT_NAME': 71, - 'IS_OP': 72, - 'JUMP_BACKWARD': 73, - 'JUMP_BACKWARD_NO_INTERRUPT': 74, - 'JUMP_FORWARD': 75, - 'LIST_APPEND': 76, - 'LIST_EXTEND': 77, - 'LOAD_ATTR': 78, - 'LOAD_COMMON_CONSTANT': 79, - 'LOAD_CONST': 80, - 'LOAD_DEREF': 81, - 'LOAD_FAST': 82, - 'LOAD_FAST_AND_CLEAR': 83, - 'LOAD_FAST_CHECK': 84, - 'LOAD_FAST_LOAD_FAST': 85, - 'LOAD_FROM_DICT_OR_DEREF': 86, - 'LOAD_FROM_DICT_OR_GLOBALS': 87, - 'LOAD_GLOBAL': 88, - 'LOAD_NAME': 89, - 'LOAD_SMALL_INT': 90, - 'LOAD_SPECIAL': 91, - 'LOAD_SUPER_ATTR': 92, - 'MAKE_CELL': 93, - 'MAP_ADD': 94, - 'MATCH_CLASS': 95, - 'POP_JUMP_IF_FALSE': 96, - 'POP_JUMP_IF_NONE': 97, - 'POP_JUMP_IF_NOT_NONE': 98, - 'POP_JUMP_IF_TRUE': 99, - 'RAISE_VARARGS': 100, - 'RERAISE': 101, - 'SEND': 102, - 'SET_ADD': 103, - 'SET_FUNCTION_ATTRIBUTE': 104, - 'SET_UPDATE': 105, - 'STORE_ATTR': 106, - 'STORE_DEREF': 107, - 'STORE_FAST': 108, - 'STORE_FAST_LOAD_FAST': 109, - 'STORE_FAST_STORE_FAST': 110, - 'STORE_GLOBAL': 111, - 'STORE_NAME': 112, - 'SWAP': 113, - 'UNPACK_EX': 114, - 'UNPACK_SEQUENCE': 115, - 'YIELD_VALUE': 116, - 'INSTRUMENTED_END_FOR': 236, + 'CALL_FUNCTION_EX': 4, + 'CHECK_EG_MATCH': 5, + 'CHECK_EXC_MATCH': 6, + 'CLEANUP_THROW': 7, + 'DELETE_SUBSCR': 8, + 'END_ASYNC_FOR': 9, + 'END_FOR': 10, + 'END_SEND': 11, + 'EXIT_INIT_CHECK': 12, + 'FORMAT_SIMPLE': 13, + 'FORMAT_WITH_SPEC': 14, + 'GET_AITER': 15, + 'GET_ANEXT': 16, + 'GET_ITER': 18, + 'GET_LEN': 19, + 'GET_YIELD_FROM_ITER': 20, + 'INTERPRETER_EXIT': 21, + 'LOAD_BUILD_CLASS': 22, + 'LOAD_LOCALS': 23, + 'MAKE_FUNCTION': 24, + 'MATCH_KEYS': 25, + 'MATCH_MAPPING': 26, + 'MATCH_SEQUENCE': 27, + 'NOP': 28, + 'NOT_TAKEN': 29, + 'POP_EXCEPT': 30, + 'POP_ITER': 31, + 'POP_TOP': 32, + 'PUSH_EXC_INFO': 33, + 'PUSH_NULL': 34, + 'RETURN_GENERATOR': 35, + 'RETURN_VALUE': 36, + 'SETUP_ANNOTATIONS': 37, + 'STORE_SLICE': 38, + 'STORE_SUBSCR': 39, + 'TO_BOOL': 40, + 'UNARY_INVERT': 41, + 'UNARY_NEGATIVE': 42, + 'UNARY_NOT': 43, + 'WITH_EXCEPT_START': 44, + 'BINARY_OP': 45, + 'BUILD_LIST': 46, + 'BUILD_MAP': 47, + 'BUILD_SET': 48, + 'BUILD_SLICE': 49, + 'BUILD_STRING': 50, + 'BUILD_TUPLE': 51, + 
'CALL': 52, + 'CALL_INTRINSIC_1': 53, + 'CALL_INTRINSIC_2': 54, + 'CALL_KW': 55, + 'COMPARE_OP': 56, + 'CONTAINS_OP': 57, + 'CONVERT_VALUE': 58, + 'COPY': 59, + 'COPY_FREE_VARS': 60, + 'DELETE_ATTR': 61, + 'DELETE_DEREF': 62, + 'DELETE_FAST': 63, + 'DELETE_GLOBAL': 64, + 'DELETE_NAME': 65, + 'DICT_MERGE': 66, + 'DICT_UPDATE': 67, + 'EXTENDED_ARG': 68, + 'FOR_ITER': 69, + 'GET_AWAITABLE': 70, + 'IMPORT_FROM': 71, + 'IMPORT_NAME': 72, + 'IS_OP': 73, + 'JUMP_BACKWARD': 74, + 'JUMP_BACKWARD_NO_INTERRUPT': 75, + 'JUMP_FORWARD': 76, + 'LIST_APPEND': 77, + 'LIST_EXTEND': 78, + 'LOAD_ATTR': 79, + 'LOAD_COMMON_CONSTANT': 80, + 'LOAD_CONST': 81, + 'LOAD_DEREF': 82, + 'LOAD_FAST': 83, + 'LOAD_FAST_AND_CLEAR': 84, + 'LOAD_FAST_CHECK': 85, + 'LOAD_FAST_LOAD_FAST': 86, + 'LOAD_FROM_DICT_OR_DEREF': 87, + 'LOAD_FROM_DICT_OR_GLOBALS': 88, + 'LOAD_GLOBAL': 89, + 'LOAD_NAME': 90, + 'LOAD_SMALL_INT': 91, + 'LOAD_SPECIAL': 92, + 'LOAD_SUPER_ATTR': 93, + 'MAKE_CELL': 94, + 'MAP_ADD': 95, + 'MATCH_CLASS': 96, + 'POP_JUMP_IF_FALSE': 97, + 'POP_JUMP_IF_NONE': 98, + 'POP_JUMP_IF_NOT_NONE': 99, + 'POP_JUMP_IF_TRUE': 100, + 'RAISE_VARARGS': 101, + 'RERAISE': 102, + 'SEND': 103, + 'SET_ADD': 104, + 'SET_FUNCTION_ATTRIBUTE': 105, + 'SET_UPDATE': 106, + 'STORE_ATTR': 107, + 'STORE_DEREF': 108, + 'STORE_FAST': 109, + 'STORE_FAST_LOAD_FAST': 110, + 'STORE_FAST_STORE_FAST': 111, + 'STORE_GLOBAL': 112, + 'STORE_NAME': 113, + 'SWAP': 114, + 'UNPACK_EX': 115, + 'UNPACK_SEQUENCE': 116, + 'YIELD_VALUE': 117, + 'INSTRUMENTED_END_FOR': 235, + 'INSTRUMENTED_POP_ITER': 236, 'INSTRUMENTED_END_SEND': 237, 'INSTRUMENTED_LOAD_SUPER_ATTR': 238, 'INSTRUMENTED_FOR_ITER': 239, @@ -350,5 +362,5 @@ 'STORE_FAST_MAYBE_NULL': 265, } -HAVE_ARGUMENT = 42 -MIN_INSTRUMENTED_OPCODE = 236 +HAVE_ARGUMENT = 44 +MIN_INSTRUMENTED_OPCODE = 235 diff --git a/Lib/_pyio.py b/Lib/_pyio.py index 14961c39d3541d..023478aa78c6a0 100644 --- a/Lib/_pyio.py +++ b/Lib/_pyio.py @@ -1692,13 +1692,14 @@ def readall(self): return bytes(result) - def readinto(self, b): + def readinto(self, buffer): """Same as RawIOBase.readinto().""" - m = memoryview(b).cast('B') - data = self.read(len(m)) - n = len(data) - m[:n] = data - return n + self._checkClosed() + self._checkReadable() + try: + return os.readinto(self._fd, buffer) + except BlockingIOError: + return None def write(self, b): """Write bytes b to file, return number written. 
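The rewritten `readinto()` above delegates to `os.readinto()`, which fills a caller-supplied buffer in place and lets `BlockingIOError` surface when a non-blocking descriptor has no data; the method then reports that case as `None`. A minimal sketch of that behaviour, not part of the patch itself, assuming a POSIX-style pipe and an interpreter new enough to provide `os.readinto()`:

```python
import os

# Set up a pipe with a few bytes available to read (illustrative setup only).
r, w = os.pipe()
os.write(w, b"hello")

buf = bytearray(16)
n = os.readinto(r, buf)            # fills `buf` in place, returns bytes read
assert bytes(buf[:n]) == b"hello"

os.set_blocking(r, False)          # drained, non-blocking fd: nothing to read
try:
    os.readinto(r, buf)
except BlockingIOError:
    n = None                       # FileIO.readinto() maps this case to None

os.close(r)
os.close(w)
```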
diff --git a/Lib/_pyrepl/commands.py b/Lib/_pyrepl/commands.py index c3fce91013b001..503ca1da329eaa 100644 --- a/Lib/_pyrepl/commands.py +++ b/Lib/_pyrepl/commands.py @@ -282,7 +282,7 @@ def do(self) -> None: x, y = r.pos2xy() new_y = y + 1 - if new_y > r.max_row(): + if r.eol() == len(b): if r.historyi < len(r.history): r.select_item(r.historyi + 1) r.pos = r.eol(0) @@ -309,7 +309,7 @@ def do(self) -> None: class left(MotionCommand): def do(self) -> None: r = self.reader - for i in range(r.get_arg()): + for _ in range(r.get_arg()): p = r.pos - 1 if p >= 0: r.pos = p @@ -321,7 +321,7 @@ class right(MotionCommand): def do(self) -> None: r = self.reader b = r.buffer - for i in range(r.get_arg()): + for _ in range(r.get_arg()): p = r.pos + 1 if p <= len(b): r.pos = p @@ -459,9 +459,15 @@ def do(self) -> None: from site import gethistoryfile # type: ignore[attr-defined] history = os.linesep.join(self.reader.history[:]) - with self.reader.suspend(): - pager = get_pager() - pager(history, gethistoryfile()) + self.reader.console.restore() + pager = get_pager() + pager(history, gethistoryfile()) + self.reader.console.prepare() + + # We need to copy over the state so that it's consistent between + # console and reader, and console does not overwrite/append stuff + self.reader.console.screen = self.reader.screen.copy() + self.reader.console.posxy = self.reader.cxy class paste_mode(Command): diff --git a/Lib/_pyrepl/completing_reader.py b/Lib/_pyrepl/completing_reader.py index e856bb9807c7f6..1cd4b6367ca8b1 100644 --- a/Lib/_pyrepl/completing_reader.py +++ b/Lib/_pyrepl/completing_reader.py @@ -260,10 +260,15 @@ def after_command(self, cmd: Command) -> None: def calc_screen(self) -> list[str]: screen = super().calc_screen() if self.cmpltn_menu_visible: - ly = self.lxy[1] + # We display the completions menu below the current prompt + ly = self.lxy[1] + 1 screen[ly:ly] = self.cmpltn_menu - self.screeninfo[ly:ly] = [(0, [])]*len(self.cmpltn_menu) - self.cxy = self.cxy[0], self.cxy[1] + len(self.cmpltn_menu) + # If we're not in the middle of multiline edit, don't append to screeninfo + # since that screws up the position calculation in pos2xy function. + # This is a hack to prevent the cursor jumping + # into the completions menu when pressing left or down arrow. 
+ if self.pos != len(self.buffer): + self.screeninfo[ly:ly] = [(0, [])]*len(self.cmpltn_menu) return screen def finish(self) -> None: diff --git a/Lib/_pyrepl/console.py b/Lib/_pyrepl/console.py index 03266c4dfc2dd8..0d78890b4f45d5 100644 --- a/Lib/_pyrepl/console.py +++ b/Lib/_pyrepl/console.py @@ -45,6 +45,7 @@ class Event: @dataclass class Console(ABC): + posxy: tuple[int, int] screen: list[str] = field(default_factory=list) height: int = 25 width: int = 80 diff --git a/Lib/_pyrepl/fancy_termios.py b/Lib/_pyrepl/fancy_termios.py index 5b85cb0f52521f..0468b9a2670267 100644 --- a/Lib/_pyrepl/fancy_termios.py +++ b/Lib/_pyrepl/fancy_termios.py @@ -40,7 +40,9 @@ def as_list(self): self.lflag, self.ispeed, self.ospeed, - self.cc, + # Always return a copy of the control characters list to ensure + # there are not any additional references to self.cc + self.cc[:], ] def copy(self): diff --git a/Lib/_pyrepl/historical_reader.py b/Lib/_pyrepl/historical_reader.py index 5d416f336ad5d2..c4b95fa2e81ee6 100644 --- a/Lib/_pyrepl/historical_reader.py +++ b/Lib/_pyrepl/historical_reader.py @@ -290,13 +290,17 @@ def get_item(self, i: int) -> str: @contextmanager def suspend(self) -> SimpleContextManager: - with super().suspend(): - try: - old_history = self.history[:] - del self.history[:] - yield - finally: - self.history[:] = old_history + with super().suspend(), self.suspend_history(): + yield + + @contextmanager + def suspend_history(self) -> SimpleContextManager: + try: + old_history = self.history[:] + del self.history[:] + yield + finally: + self.history[:] = old_history def prepare(self) -> None: super().prepare() diff --git a/Lib/_pyrepl/reader.py b/Lib/_pyrepl/reader.py index 4b0700d069c621..1252847e02b2ea 100644 --- a/Lib/_pyrepl/reader.py +++ b/Lib/_pyrepl/reader.py @@ -587,10 +587,11 @@ def setpos_from_xy(self, x: int, y: int) -> None: def pos2xy(self) -> tuple[int, int]: """Return the x, y coordinates of position 'pos'.""" # this *is* incomprehensible, yes. - y = 0 + p, y = 0, 0 + l2: list[int] = [] pos = self.pos assert 0 <= pos <= len(self.buffer) - if pos == len(self.buffer): + if pos == len(self.buffer) and len(self.screeninfo) > 0: y = len(self.screeninfo) - 1 p, l2 = self.screeninfo[y] return p + sum(l2) + l2.count(0), y diff --git a/Lib/_pyrepl/simple_interact.py b/Lib/_pyrepl/simple_interact.py index a5033496712a73..a065174ad42fb6 100644 --- a/Lib/_pyrepl/simple_interact.py +++ b/Lib/_pyrepl/simple_interact.py @@ -77,7 +77,7 @@ def _clear_screen(): "exit": _sitebuiltins.Quitter('exit', ''), "quit": _sitebuiltins.Quitter('quit' ,''), "copyright": _sitebuiltins._Printer('copyright', sys.copyright), - "help": "help", + "help": _sitebuiltins._Helper(), "clear": _clear_screen, "\x1a": _sitebuiltins.Quitter('\x1a', ''), } @@ -124,18 +124,10 @@ def maybe_run_command(statement: str) -> bool: reader.history.pop() # skip internal commands in history command = REPL_COMMANDS[statement] if callable(command): - command() + # Make sure that history does not change because of commands + with reader.suspend_history(): + command() return True - - if isinstance(command, str): - # Internal readline commands require a prepared reader like - # inside multiline_input. 
- reader.prepare() - reader.refresh() - reader.do_cmd((command, [statement])) - reader.restore() - return True - return False while True: diff --git a/Lib/_pyrepl/unix_console.py b/Lib/_pyrepl/unix_console.py index 63e8fc24dd7625..96379bc20f3357 100644 --- a/Lib/_pyrepl/unix_console.py +++ b/Lib/_pyrepl/unix_console.py @@ -240,7 +240,7 @@ def refresh(self, screen, c_xy): self.__hide_cursor() self.__move(0, len(self.screen) - 1) self.__write("\n") - self.__posxy = 0, len(self.screen) + self.posxy = 0, len(self.screen) self.screen.append("") else: while len(self.screen) < len(screen): @@ -250,7 +250,7 @@ def refresh(self, screen, c_xy): self.__gone_tall = 1 self.__move = self.__move_tall - px, py = self.__posxy + px, py = self.posxy old_offset = offset = self.__offset height = self.height @@ -271,7 +271,7 @@ def refresh(self, screen, c_xy): if old_offset > offset and self._ri: self.__hide_cursor() self.__write_code(self._cup, 0, 0) - self.__posxy = 0, old_offset + self.posxy = 0, old_offset for i in range(old_offset - offset): self.__write_code(self._ri) oldscr.pop(-1) @@ -279,7 +279,7 @@ def refresh(self, screen, c_xy): elif old_offset < offset and self._ind: self.__hide_cursor() self.__write_code(self._cup, self.height - 1, 0) - self.__posxy = 0, old_offset + self.height - 1 + self.posxy = 0, old_offset + self.height - 1 for i in range(offset - old_offset): self.__write_code(self._ind) oldscr.pop(0) @@ -299,7 +299,7 @@ def refresh(self, screen, c_xy): while y < len(oldscr): self.__hide_cursor() self.__move(0, y) - self.__posxy = 0, y + self.posxy = 0, y self.__write_code(self._el) y += 1 @@ -321,7 +321,7 @@ def move_cursor(self, x, y): self.event_queue.insert(Event("scroll", None)) else: self.__move(x, y) - self.__posxy = x, y + self.posxy = x, y self.flushoutput() def prepare(self): @@ -350,7 +350,7 @@ def prepare(self): self.__buffer = [] - self.__posxy = 0, 0 + self.posxy = 0, 0 self.__gone_tall = 0 self.__move = self.__move_short self.__offset = 0 @@ -449,10 +449,12 @@ def getheightwidth(self): """ try: return int(os.environ["LINES"]), int(os.environ["COLUMNS"]) - except KeyError: - height, width = struct.unpack( - "hhhh", ioctl(self.input_fd, TIOCGWINSZ, b"\000" * 8) - )[0:2] + except (KeyError, TypeError, ValueError): + try: + size = ioctl(self.input_fd, TIOCGWINSZ, b"\000" * 8) + except OSError: + return 25, 80 + height, width = struct.unpack("hhhh", size)[0:2] if not height: return 25, 80 return height, width @@ -468,7 +470,7 @@ def getheightwidth(self): """ try: return int(os.environ["LINES"]), int(os.environ["COLUMNS"]) - except KeyError: + except (KeyError, TypeError, ValueError): return 25, 80 def forgetinput(self): @@ -559,7 +561,7 @@ def clear(self): self.__write_code(self._clear) self.__gone_tall = 1 self.__move = self.__move_tall - self.__posxy = 0, 0 + self.posxy = 0, 0 self.screen = [] @property @@ -644,8 +646,8 @@ def __write_changed_line(self, y, oldline, newline, px_coord): # if we need to insert a single character right after the first detected change if oldline[x_pos:] == newline[x_pos + 1 :] and self.ich1: if ( - y == self.__posxy[1] - and x_coord > self.__posxy[0] + y == self.posxy[1] + and x_coord > self.posxy[0] and oldline[px_pos:x_pos] == newline[px_pos + 1 : x_pos + 1] ): x_pos = px_pos @@ -654,7 +656,7 @@ def __write_changed_line(self, y, oldline, newline, px_coord): self.__move(x_coord, y) self.__write_code(self.ich1) self.__write(newline[x_pos]) - self.__posxy = x_coord + character_width, y + self.posxy = x_coord + character_width, y # if it's a single 
character change in the middle of the line elif ( @@ -665,7 +667,7 @@ def __write_changed_line(self, y, oldline, newline, px_coord): character_width = wlen(newline[x_pos]) self.__move(x_coord, y) self.__write(newline[x_pos]) - self.__posxy = x_coord + character_width, y + self.posxy = x_coord + character_width, y # if this is the last character to fit in the line and we edit in the middle of the line elif ( @@ -677,14 +679,14 @@ def __write_changed_line(self, y, oldline, newline, px_coord): ): self.__hide_cursor() self.__move(self.width - 2, y) - self.__posxy = self.width - 2, y + self.posxy = self.width - 2, y self.__write_code(self.dch1) character_width = wlen(newline[x_pos]) self.__move(x_coord, y) self.__write_code(self.ich1) self.__write(newline[x_pos]) - self.__posxy = character_width + 1, y + self.posxy = character_width + 1, y else: self.__hide_cursor() @@ -692,7 +694,7 @@ def __write_changed_line(self, y, oldline, newline, px_coord): if wlen(oldline) > wlen(newline): self.__write_code(self._el) self.__write(newline[x_pos:]) - self.__posxy = wlen(newline), y + self.posxy = wlen(newline), y if "\x1b" in newline: # ANSI escape characters are present, so we can't assume @@ -711,32 +713,36 @@ def __maybe_write_code(self, fmt, *args): self.__write_code(fmt, *args) def __move_y_cuu1_cud1(self, y): - dy = y - self.__posxy[1] + assert self._cud1 is not None + assert self._cuu1 is not None + dy = y - self.posxy[1] if dy > 0: self.__write_code(dy * self._cud1) elif dy < 0: self.__write_code((-dy) * self._cuu1) def __move_y_cuu_cud(self, y): - dy = y - self.__posxy[1] + dy = y - self.posxy[1] if dy > 0: self.__write_code(self._cud, dy) elif dy < 0: self.__write_code(self._cuu, -dy) def __move_x_hpa(self, x: int) -> None: - if x != self.__posxy[0]: + if x != self.posxy[0]: self.__write_code(self._hpa, x) def __move_x_cub1_cuf1(self, x: int) -> None: - dx = x - self.__posxy[0] + assert self._cuf1 is not None + assert self._cub1 is not None + dx = x - self.posxy[0] if dx > 0: self.__write_code(self._cuf1 * dx) elif dx < 0: self.__write_code(self._cub1 * (-dx)) def __move_x_cub_cuf(self, x: int) -> None: - dx = x - self.__posxy[0] + dx = x - self.posxy[0] if dx > 0: self.__write_code(self._cuf, dx) elif dx < 0: @@ -766,12 +772,12 @@ def __show_cursor(self): def repaint(self): if not self.__gone_tall: - self.__posxy = 0, self.__posxy[1] + self.posxy = 0, self.posxy[1] self.__write("\r") ns = len(self.screen) * ["\000" * self.width] self.screen = ns else: - self.__posxy = 0, self.__offset + self.posxy = 0, self.__offset self.__move(0, self.__offset) ns = self.height * ["\000" * self.width] self.screen = ns diff --git a/Lib/_pyrepl/utils.py b/Lib/_pyrepl/utils.py index 0f36083b6ffa92..4651717bd7e121 100644 --- a/Lib/_pyrepl/utils.py +++ b/Lib/_pyrepl/utils.py @@ -16,7 +16,7 @@ def str_width(c: str) -> int: def wlen(s: str) -> int: - if len(s) == 1: + if len(s) == 1 and s != '\x1a': return str_width(s) length = sum(str_width(i) for i in s) # remove lengths of any escape sequences diff --git a/Lib/_pyrepl/windows_console.py b/Lib/_pyrepl/windows_console.py index d457d2b5a338eb..e1ecd9845aefb4 100644 --- a/Lib/_pyrepl/windows_console.py +++ b/Lib/_pyrepl/windows_console.py @@ -102,6 +102,10 @@ def __init__(self, err: int | None, descr: str | None = None) -> None: MOVE_DOWN = "\x1b[{}B" CLEAR = "\x1b[H\x1b[J" +# State of control keys: https://learn.microsoft.com/en-us/windows/console/key-event-record-str +ALT_ACTIVE = 0x01 | 0x02 +CTRL_ACTIVE = 0x04 | 0x08 + class _error(Exception): pass @@ -148,10 
+152,10 @@ def refresh(self, screen: list[str], c_xy: tuple[int, int]) -> None: self._hide_cursor() self._move_relative(0, len(self.screen) - 1) self.__write("\n") - self.__posxy = 0, len(self.screen) + self.posxy = 0, len(self.screen) self.screen.append("") - px, py = self.__posxy + px, py = self.posxy old_offset = offset = self.__offset height = self.height @@ -167,7 +171,7 @@ def refresh(self, screen: list[str], c_xy: tuple[int, int]) -> None: # portion of the window. We need to scroll the visible portion and the # entire history self._scroll(scroll_lines, self._getscrollbacksize()) - self.__posxy = self.__posxy[0], self.__posxy[1] + scroll_lines + self.posxy = self.posxy[0], self.posxy[1] + scroll_lines self.__offset += scroll_lines for i in range(scroll_lines): @@ -193,7 +197,7 @@ def refresh(self, screen: list[str], c_xy: tuple[int, int]) -> None: y = len(newscr) while y < len(oldscr): self._move_relative(0, y) - self.__posxy = 0, y + self.posxy = 0, y self._erase_to_end() y += 1 @@ -250,11 +254,11 @@ def __write_changed_line( if wlen(newline) == self.width: # If we wrapped we want to start at the next line self._move_relative(0, y + 1) - self.__posxy = 0, y + 1 + self.posxy = 0, y + 1 else: - self.__posxy = wlen(newline), y + self.posxy = wlen(newline), y - if "\x1b" in newline or y != self.__posxy[1] or '\x1a' in newline: + if "\x1b" in newline or y != self.posxy[1] or '\x1a' in newline: # ANSI escape characters are present, so we can't assume # anything about the position of the cursor. Moving the cursor # to the left margin should work to get to a known position. @@ -316,7 +320,7 @@ def prepare(self) -> None: self.screen = [] self.height, self.width = self.getheightwidth() - self.__posxy = 0, 0 + self.posxy = 0, 0 self.__gone_tall = 0 self.__offset = 0 @@ -324,9 +328,9 @@ def restore(self) -> None: pass def _move_relative(self, x: int, y: int) -> None: - """Moves relative to the current __posxy""" - dx = x - self.__posxy[0] - dy = y - self.__posxy[1] + """Moves relative to the current posxy""" + dx = x - self.posxy[0] + dy = y - self.posxy[1] if dx < 0: self.__write(MOVE_LEFT.format(-dx)) elif dx > 0: @@ -345,7 +349,7 @@ def move_cursor(self, x: int, y: int) -> None: self.event_queue.insert(0, Event("scroll", "")) else: self._move_relative(x, y) - self.__posxy = x, y + self.posxy = x, y def set_cursor_vis(self, visible: bool) -> None: if visible: @@ -407,31 +411,37 @@ def get_event(self, block: bool = True) -> Event | None: continue return None - key = rec.Event.KeyEvent.uChar.UnicodeChar + key_event = rec.Event.KeyEvent + raw_key = key = key_event.uChar.UnicodeChar - if rec.Event.KeyEvent.uChar.UnicodeChar == "\r": - # Make enter make unix-like + if key == "\r": + # Make enter unix-like return Event(evt="key", data="\n", raw=b"\n") - elif rec.Event.KeyEvent.wVirtualKeyCode == 8: + elif key_event.wVirtualKeyCode == 8: # Turn backspace directly into the command - return Event( - evt="key", - data="backspace", - raw=rec.Event.KeyEvent.uChar.UnicodeChar, - ) - elif rec.Event.KeyEvent.uChar.UnicodeChar == "\x00": + key = "backspace" + elif key == "\x00": # Handle special keys like arrow keys and translate them into the appropriate command - code = VK_MAP.get(rec.Event.KeyEvent.wVirtualKeyCode) - if code: - return Event( - evt="key", data=code, raw=rec.Event.KeyEvent.uChar.UnicodeChar - ) + key = VK_MAP.get(key_event.wVirtualKeyCode) + if key: + if key_event.dwControlKeyState & CTRL_ACTIVE: + key = f"ctrl {key}" + elif key_event.dwControlKeyState & ALT_ACTIVE: + # queue the key, 
return the meta command + self.event_queue.insert(0, Event(evt="key", data=key, raw=key)) + return Event(evt="key", data="\033") # keymap.py uses this for meta + return Event(evt="key", data=key, raw=key) if block: continue return None - return Event(evt="key", data=key, raw=rec.Event.KeyEvent.uChar.UnicodeChar) + if key_event.dwControlKeyState & ALT_ACTIVE: + # queue the key, return the meta command + self.event_queue.insert(0, Event(evt="key", data=key, raw=raw_key)) + return Event(evt="key", data="\033") # keymap.py uses this for meta + + return Event(evt="key", data=key, raw=raw_key) def push_char(self, char: int | bytes) -> None: """ @@ -445,7 +455,7 @@ def beep(self) -> None: def clear(self) -> None: """Wipe the screen""" self.__write(CLEAR) - self.__posxy = 0, 0 + self.posxy = 0, 0 self.screen = [""] def finish(self) -> None: diff --git a/Lib/ast.py b/Lib/ast.py index 154d2c8c1f9ebb..0937c27bdf8a11 100644 --- a/Lib/ast.py +++ b/Lib/ast.py @@ -1196,9 +1196,14 @@ def visit_JoinedStr(self, node): fallback_to_repr = True break quote_types = new_quote_types - elif "\n" in value: - quote_types = [q for q in quote_types if q in _MULTI_QUOTES] - assert quote_types + else: + if "\n" in value: + quote_types = [q for q in quote_types if q in _MULTI_QUOTES] + assert quote_types + + new_quote_types = [q for q in quote_types if q not in value] + if new_quote_types: + quote_types = new_quote_types new_fstring_parts.append(value) if fallback_to_repr: diff --git a/Lib/asyncio/__init__.py b/Lib/asyncio/__init__.py index edb615b1b6b1c6..4be7112fa017d4 100644 --- a/Lib/asyncio/__init__.py +++ b/Lib/asyncio/__init__.py @@ -10,6 +10,7 @@ from .events import * from .exceptions import * from .futures import * +from .graph import * from .locks import * from .protocols import * from .runners import * @@ -27,6 +28,7 @@ events.__all__ + exceptions.__all__ + futures.__all__ + + graph.__all__ + locks.__all__ + protocols.__all__ + runners.__all__ + diff --git a/Lib/asyncio/base_events.py b/Lib/asyncio/base_events.py index 5dbe4b28d236d3..ed852421e44212 100644 --- a/Lib/asyncio/base_events.py +++ b/Lib/asyncio/base_events.py @@ -458,26 +458,24 @@ def create_future(self): """Create a Future object attached to the loop.""" return futures.Future(loop=self) - def create_task(self, coro, *, name=None, context=None): + def create_task(self, coro, **kwargs): """Schedule a coroutine object. Return a task object. """ self._check_closed() - if self._task_factory is None: - task = tasks.Task(coro, loop=self, name=name, context=context) - if task._source_traceback: - del task._source_traceback[-1] - else: - if context is None: - # Use legacy API if context is not needed - task = self._task_factory(self, coro) - else: - task = self._task_factory(self, coro, context=context) - - task.set_name(name) + if self._task_factory is not None: + return self._task_factory(self, coro, **kwargs) - return task + task = tasks.Task(coro, loop=self, **kwargs) + if task._source_traceback: + del task._source_traceback[-1] + try: + return task + finally: + # gh-128552: prevent a refcycle of + # task.exception().__traceback__->BaseEventLoop.create_task->task + del task def set_task_factory(self, factory): """Set a task factory that will be used by loop.create_task(). @@ -485,9 +483,10 @@ def set_task_factory(self, factory): If factory is None the default task factory will be set. 
If factory is a callable, it should have a signature matching - '(loop, coro)', where 'loop' will be a reference to the active - event loop, 'coro' will be a coroutine object. The callable - must return a Future. + '(loop, coro, **kwargs)', where 'loop' will be a reference to the active + event loop, 'coro' will be a coroutine object, and **kwargs will be + arbitrary keyword arguments that should be passed on to Task. + The callable must return a Task. """ if factory is not None and not callable(factory): raise TypeError('task factory must be a callable or None') @@ -873,7 +872,10 @@ def call_soon_threadsafe(self, callback, *args, context=None): self._check_closed() if self._debug: self._check_callback(callback, 'call_soon_threadsafe') - handle = self._call_soon(callback, args, context) + handle = events._ThreadSafeHandle(callback, args, self, context) + self._ready.append(handle) + if handle._source_traceback: + del handle._source_traceback[-1] if handle._source_traceback: del handle._source_traceback[-1] self._write_to_self() @@ -1585,7 +1587,9 @@ async def create_server( if reuse_address: sock.setsockopt( socket.SOL_SOCKET, socket.SO_REUSEADDR, True) - if reuse_port: + # Since Linux 6.12.9, SO_REUSEPORT is not allowed + # on other address families than AF_INET/AF_INET6. + if reuse_port and af in (socket.AF_INET, socket.AF_INET6): _set_reuseport(sock) if keep_alive: sock.setsockopt( diff --git a/Lib/asyncio/events.py b/Lib/asyncio/events.py index 6e291d28ec81ae..2e45b4fe6fa2dd 100644 --- a/Lib/asyncio/events.py +++ b/Lib/asyncio/events.py @@ -113,6 +113,34 @@ def _run(self): self._loop.call_exception_handler(context) self = None # Needed to break cycles when an exception occurs. +# _ThreadSafeHandle is used for callbacks scheduled with call_soon_threadsafe +# and is thread safe unlike Handle which is not thread safe. +class _ThreadSafeHandle(Handle): + + __slots__ = ('_lock',) + + def __init__(self, callback, args, loop, context=None): + super().__init__(callback, args, loop, context) + self._lock = threading.RLock() + + def cancel(self): + with self._lock: + return super().cancel() + + def cancelled(self): + with self._lock: + return super().cancelled() + + def _run(self): + # The event loop checks for cancellation without holding the lock + # It is possible that the handle is cancelled after the check + # but before the callback is called so check it again after acquiring + # the lock and return without calling the callback if it is cancelled. + with self._lock: + if self._cancelled: + return + return super()._run() + class TimerHandle(Handle): """Object returned by timed callback registration methods.""" @@ -301,7 +329,7 @@ def create_future(self): # Method scheduling a coroutine object: create a task. - def create_task(self, coro, *, name=None, context=None): + def create_task(self, coro, **kwargs): raise NotImplementedError # Methods for interacting with threads. diff --git a/Lib/asyncio/futures.py b/Lib/asyncio/futures.py index c95fce035cd548..d1df6707302277 100644 --- a/Lib/asyncio/futures.py +++ b/Lib/asyncio/futures.py @@ -2,6 +2,7 @@ __all__ = ( 'Future', 'wrap_future', 'isfuture', + 'future_add_to_awaited_by', 'future_discard_from_awaited_by', ) import concurrent.futures @@ -62,10 +63,13 @@ class Future: # that it is not compatible by setting this to None. # - It is set by __iter__() below so that Task.__step() can tell # the difference between - # `await Future()` or`yield from Future()` (correct) vs. + # `await Future()` or `yield from Future()` (correct) vs. 
# `yield Future()` (incorrect). _asyncio_future_blocking = False + # Used by the capture_call_stack() API. + __asyncio_awaited_by = None + __log_traceback = False def __init__(self, *, loop=None): @@ -115,6 +119,12 @@ def _log_traceback(self, val): raise ValueError('_log_traceback can only be set to False') self.__log_traceback = False + @property + def _asyncio_awaited_by(self): + if self.__asyncio_awaited_by is None: + return None + return frozenset(self.__asyncio_awaited_by) + def get_loop(self): """Return the event loop the Future is bound to.""" loop = self._loop @@ -415,6 +425,49 @@ def wrap_future(future, *, loop=None): return new_future +def future_add_to_awaited_by(fut, waiter, /): + """Record that `fut` is awaited on by `waiter`.""" + # For the sake of keeping the implementation minimal and assuming + # that most of asyncio users use the built-in Futures and Tasks + # (or their subclasses), we only support native Future objects + # and their subclasses. + # + # Longer version: tracking requires storing the caller-callee + # dependency somewhere. One obvious choice is to store that + # information right in the future itself in a dedicated attribute. + # This means that we'd have to require all duck-type compatible + # futures to implement a specific attribute used by asyncio for + # the book keeping. Another solution would be to store that in + # a global dictionary. The downside here is that that would create + # strong references and any scenario where the "add" call isn't + # followed by a "discard" call would lead to a memory leak. + # Using WeakDict would resolve that issue, but would complicate + # the C code (_asynciomodule.c). The bottom line here is that + # it's not clear that all this work would be worth the effort. + # + # Note that there's an accelerated version of this function + # shadowing this implementation later in this file. + if isinstance(fut, _PyFuture) and isinstance(waiter, _PyFuture): + if fut._Future__asyncio_awaited_by is None: + fut._Future__asyncio_awaited_by = set() + fut._Future__asyncio_awaited_by.add(waiter) + + +def future_discard_from_awaited_by(fut, waiter, /): + """Record that `fut` is no longer awaited on by `waiter`.""" + # See the comment in "future_add_to_awaited_by()" body for + # details on implementation. + # + # Note that there's an accelerated version of this function + # shadowing this implementation later in this file. + if isinstance(fut, _PyFuture) and isinstance(waiter, _PyFuture): + if fut._Future__asyncio_awaited_by is not None: + fut._Future__asyncio_awaited_by.discard(waiter) + + +_py_future_add_to_awaited_by = future_add_to_awaited_by +_py_future_discard_from_awaited_by = future_discard_from_awaited_by + try: import _asyncio except ImportError: @@ -422,3 +475,7 @@ def wrap_future(future, *, loop=None): else: # _CFuture is needed for tests. Future = _CFuture = _asyncio.Future + future_add_to_awaited_by = _asyncio.future_add_to_awaited_by + future_discard_from_awaited_by = _asyncio.future_discard_from_awaited_by + _c_future_add_to_awaited_by = future_add_to_awaited_by + _c_future_discard_from_awaited_by = future_discard_from_awaited_by diff --git a/Lib/asyncio/graph.py b/Lib/asyncio/graph.py new file mode 100644 index 00000000000000..d8df7c9919abbf --- /dev/null +++ b/Lib/asyncio/graph.py @@ -0,0 +1,278 @@ +"""Introspection utils for tasks call graphs.""" + +import dataclasses +import sys +import types + +from . import events +from . import futures +from . 
import tasks + +__all__ = ( + 'capture_call_graph', + 'format_call_graph', + 'print_call_graph', + 'FrameCallGraphEntry', + 'FutureCallGraph', +) + +if False: # for type checkers + from typing import TextIO + +# Sadly, we can't re-use the traceback module's datastructures as those +# are tailored for error reporting, whereas we need to represent an +# async call graph. +# +# Going with pretty verbose names as we'd like to export them to the +# top level asyncio namespace, and want to avoid future name clashes. + + +@dataclasses.dataclass(frozen=True, slots=True) +class FrameCallGraphEntry: + frame: types.FrameType + + +@dataclasses.dataclass(frozen=True, slots=True) +class FutureCallGraph: + future: futures.Future + call_stack: tuple["FrameCallGraphEntry", ...] + awaited_by: tuple["FutureCallGraph", ...] + + +def _build_graph_for_future( + future: futures.Future, + *, + limit: int | None = None, +) -> FutureCallGraph: + if not isinstance(future, futures.Future): + raise TypeError( + f"{future!r} object does not appear to be compatible " + f"with asyncio.Future" + ) + + coro = None + if get_coro := getattr(future, 'get_coro', None): + coro = get_coro() if limit != 0 else None + + st: list[FrameCallGraphEntry] = [] + awaited_by: list[FutureCallGraph] = [] + + while coro is not None: + if hasattr(coro, 'cr_await'): + # A native coroutine or duck-type compatible iterator + st.append(FrameCallGraphEntry(coro.cr_frame)) + coro = coro.cr_await + elif hasattr(coro, 'ag_await'): + # A native async generator or duck-type compatible iterator + st.append(FrameCallGraphEntry(coro.cr_frame)) + coro = coro.ag_await + else: + break + + if future._asyncio_awaited_by: + for parent in future._asyncio_awaited_by: + awaited_by.append(_build_graph_for_future(parent, limit=limit)) + + if limit is not None: + if limit > 0: + st = st[:limit] + elif limit < 0: + st = st[limit:] + st.reverse() + return FutureCallGraph(future, tuple(st), tuple(awaited_by)) + + +def capture_call_graph( + future: futures.Future | None = None, + /, + *, + depth: int = 1, + limit: int | None = None, +) -> FutureCallGraph | None: + """Capture the async call graph for the current task or the provided Future. + + The graph is represented with three data structures: + + * FutureCallGraph(future, call_stack, awaited_by) + + Where 'future' is an instance of asyncio.Future or asyncio.Task. + + 'call_stack' is a tuple of FrameGraphEntry objects. + + 'awaited_by' is a tuple of FutureCallGraph objects. + + * FrameCallGraphEntry(frame) + + Where 'frame' is a frame object of a regular Python function + in the call stack. + + Receives an optional 'future' argument. If not passed, + the current task will be used. If there's no current task, the function + returns None. + + If "capture_call_graph()" is introspecting *the current task*, the + optional keyword-only 'depth' argument can be used to skip the specified + number of frames from top of the stack. + + If the optional keyword-only 'limit' argument is provided, each call stack + in the resulting graph is truncated to include at most ``abs(limit)`` + entries. If 'limit' is positive, the entries left are the closest to + the invocation point. If 'limit' is negative, the topmost entries are + left. If 'limit' is omitted or None, all entries are present. + If 'limit' is 0, the call stack is not captured at all, only + "awaited by" information is present. 
+ """ + + loop = events._get_running_loop() + + if future is not None: + # Check if we're in a context of a running event loop; + # if yes - check if the passed future is the currently + # running task or not. + if loop is None or future is not tasks.current_task(loop=loop): + return _build_graph_for_future(future, limit=limit) + # else: future is the current task, move on. + else: + if loop is None: + raise RuntimeError( + 'capture_call_graph() is called outside of a running ' + 'event loop and no *future* to introspect was provided') + future = tasks.current_task(loop=loop) + + if future is None: + # This isn't a generic call stack introspection utility. If we + # can't determine the current task and none was provided, we + # just return. + return None + + if not isinstance(future, futures.Future): + raise TypeError( + f"{future!r} object does not appear to be compatible " + f"with asyncio.Future" + ) + + call_stack: list[FrameCallGraphEntry] = [] + + f = sys._getframe(depth) if limit != 0 else None + try: + while f is not None: + is_async = f.f_generator is not None + call_stack.append(FrameCallGraphEntry(f)) + + if is_async: + if f.f_back is not None and f.f_back.f_generator is None: + # We've reached the bottom of the coroutine stack, which + # must be the Task that runs it. + break + + f = f.f_back + finally: + del f + + awaited_by = [] + if future._asyncio_awaited_by: + for parent in future._asyncio_awaited_by: + awaited_by.append(_build_graph_for_future(parent, limit=limit)) + + if limit is not None: + limit *= -1 + if limit > 0: + call_stack = call_stack[:limit] + elif limit < 0: + call_stack = call_stack[limit:] + + return FutureCallGraph(future, tuple(call_stack), tuple(awaited_by)) + + +def format_call_graph( + future: futures.Future | None = None, + /, + *, + depth: int = 1, + limit: int | None = None, +) -> str: + """Return the async call graph as a string for `future`. + + If `future` is not provided, format the call graph for the current task. + """ + + def render_level(st: FutureCallGraph, buf: list[str], level: int) -> None: + def add_line(line: str) -> None: + buf.append(level * ' ' + line) + + if isinstance(st.future, tasks.Task): + add_line( + f'* Task(name={st.future.get_name()!r}, id={id(st.future):#x})' + ) + else: + add_line( + f'* Future(id={id(st.future):#x})' + ) + + if st.call_stack: + add_line( + f' + Call stack:' + ) + for ste in st.call_stack: + f = ste.frame + + if f.f_generator is None: + f = ste.frame + add_line( + f' | File {f.f_code.co_filename!r},' + f' line {f.f_lineno}, in' + f' {f.f_code.co_qualname}()' + ) + else: + c = f.f_generator + + try: + f = c.cr_frame + code = c.cr_code + tag = 'async' + except AttributeError: + try: + f = c.ag_frame + code = c.ag_code + tag = 'async generator' + except AttributeError: + f = c.gi_frame + code = c.gi_code + tag = 'generator' + + add_line( + f' | File {f.f_code.co_filename!r},' + f' line {f.f_lineno}, in' + f' {tag} {code.co_qualname}()' + ) + + if st.awaited_by: + add_line( + f' + Awaited by:' + ) + for fut in st.awaited_by: + render_level(fut, buf, level + 1) + + graph = capture_call_graph(future, depth=depth + 1, limit=limit) + if graph is None: + return "" + + buf: list[str] = [] + try: + render_level(graph, buf, 0) + finally: + # 'graph' has references to frames so we should + # make sure it's GC'ed as soon as we don't need it. 
+ del graph + return '\n'.join(buf) + +def print_call_graph( + future: futures.Future | None = None, + /, + *, + file: TextIO | None = None, + depth: int = 1, + limit: int | None = None, +) -> None: + """Print the async call graph for the current task or the provided Future.""" + print(format_call_graph(future, depth=depth, limit=limit), file=file) diff --git a/Lib/asyncio/locks.py b/Lib/asyncio/locks.py index f2f8b7ec858096..fa3a94764b507a 100644 --- a/Lib/asyncio/locks.py +++ b/Lib/asyncio/locks.py @@ -485,7 +485,7 @@ class Barrier(mixins._LoopBoundMixin): def __init__(self, parties): """Create a barrier, initialised to 'parties' tasks.""" if parties < 1: - raise ValueError('parties must be > 0') + raise ValueError('parties must be >= 1') self._cond = Condition() # notify all tasks when state changes diff --git a/Lib/asyncio/selector_events.py b/Lib/asyncio/selector_events.py index 50992a607b3a1c..22147451fa7ebd 100644 --- a/Lib/asyncio/selector_events.py +++ b/Lib/asyncio/selector_events.py @@ -1185,10 +1185,13 @@ def can_write_eof(self): return True def _call_connection_lost(self, exc): - super()._call_connection_lost(exc) - if self._empty_waiter is not None: - self._empty_waiter.set_exception( - ConnectionError("Connection is closed by peer")) + try: + super()._call_connection_lost(exc) + finally: + self._write_ready = None + if self._empty_waiter is not None: + self._empty_waiter.set_exception( + ConnectionError("Connection is closed by peer")) def _make_empty_waiter(self): if self._empty_waiter is not None: @@ -1203,7 +1206,6 @@ def _reset_empty_waiter(self): def close(self): self._read_ready_cb = None - self._write_ready = None super().close() diff --git a/Lib/asyncio/staggered.py b/Lib/asyncio/staggered.py index 0f4df8855a80b9..2ad65d8648e6c5 100644 --- a/Lib/asyncio/staggered.py +++ b/Lib/asyncio/staggered.py @@ -8,6 +8,7 @@ from . import exceptions as exceptions_mod from . import locks from . import tasks +from . import futures async def staggered_race(coro_fns, delay, *, loop=None): @@ -63,11 +64,32 @@ async def staggered_race(coro_fns, delay, *, loop=None): """ # TODO: when we have aiter() and anext(), allow async iterables in coro_fns. loop = loop or events.get_running_loop() + parent_task = tasks.current_task(loop) enum_coro_fns = enumerate(coro_fns) winner_result = None winner_index = None + unhandled_exceptions = [] exceptions = [] - running_tasks = [] + running_tasks = set() + on_completed_fut = None + + def task_done(task): + running_tasks.discard(task) + futures.future_discard_from_awaited_by(task, parent_task) + if ( + on_completed_fut is not None + and not on_completed_fut.done() + and not running_tasks + ): + on_completed_fut.set_result(None) + + if task.cancelled(): + return + + exc = task.exception() + if exc is None: + return + unhandled_exceptions.append(exc) async def run_one_coro(ok_to_start, previous_failed) -> None: # in eager tasks this waits for the calling task to append this task @@ -91,11 +113,12 @@ async def run_one_coro(ok_to_start, previous_failed) -> None: this_failed = locks.Event() next_ok_to_start = locks.Event() next_task = loop.create_task(run_one_coro(next_ok_to_start, this_failed)) - running_tasks.append(next_task) + futures.future_add_to_awaited_by(next_task, parent_task) + running_tasks.add(next_task) + next_task.add_done_callback(task_done) # next_task has been appended to running_tasks so next_task is ok to # start. 
next_ok_to_start.set() - assert len(running_tasks) == this_index + 2 # Prepare place to put this coroutine's exceptions if not won exceptions.append(None) assert len(exceptions) == this_index + 1 @@ -120,31 +143,37 @@ async def run_one_coro(ok_to_start, previous_failed) -> None: # up as done() == True, cancelled() == False, exception() == # asyncio.CancelledError. This behavior is specified in # https://bugs.python.org/issue30048 - for i, t in enumerate(running_tasks): - if i != this_index: + current_task = tasks.current_task(loop) + for t in running_tasks: + if t is not current_task: t.cancel() - ok_to_start = locks.Event() - first_task = loop.create_task(run_one_coro(ok_to_start, None)) - running_tasks.append(first_task) - # first_task has been appended to running_tasks so first_task is ok to start. - ok_to_start.set() + propagate_cancellation_error = None try: - # Wait for a growing list of tasks to all finish: poor man's version of - # curio's TaskGroup or trio's nursery - done_count = 0 - while done_count != len(running_tasks): - done, _ = await tasks.wait(running_tasks) - done_count = len(done) + ok_to_start = locks.Event() + first_task = loop.create_task(run_one_coro(ok_to_start, None)) + futures.future_add_to_awaited_by(first_task, parent_task) + running_tasks.add(first_task) + first_task.add_done_callback(task_done) + # first_task has been appended to running_tasks so first_task is ok to start. + ok_to_start.set() + propagate_cancellation_error = None + # Make sure no tasks are left running if we leave this function + while running_tasks: + on_completed_fut = loop.create_future() + try: + await on_completed_fut + except exceptions_mod.CancelledError as ex: + propagate_cancellation_error = ex + for task in running_tasks: + task.cancel(*ex.args) + on_completed_fut = None + if __debug__ and unhandled_exceptions: # If run_one_coro raises an unhandled exception, it's probably a # programming error, and I want to see it. - if __debug__: - for d in done: - if d.done() and not d.cancelled() and d.exception(): - raise d.exception() + raise ExceptionGroup("staggered race failed", unhandled_exceptions) + if propagate_cancellation_error is not None: + raise propagate_cancellation_error return winner_result, winner_index, exceptions finally: - del exceptions - # Make sure no tasks are left running if we leave this function - for t in running_tasks: - t.cancel() + del exceptions, propagate_cancellation_error, unhandled_exceptions, parent_task diff --git a/Lib/asyncio/taskgroups.py b/Lib/asyncio/taskgroups.py index 9fa772ca9d02cc..1633478d1c87c2 100644 --- a/Lib/asyncio/taskgroups.py +++ b/Lib/asyncio/taskgroups.py @@ -6,6 +6,7 @@ from . import events from . import exceptions +from . import futures from . import tasks @@ -197,15 +198,20 @@ def create_task(self, coro, *, name=None, context=None): else: task = self._loop.create_task(coro, name=name, context=context) - # optimization: Immediately call the done callback if the task is + futures.future_add_to_awaited_by(task, self._parent_task) + + # Always schedule the done callback even if the task is # already done (e.g. if the coro was able to complete eagerly), - # and skip scheduling a done callback - if task.done(): - self._on_task_done(task) - else: - self._tasks.add(task) - task.add_done_callback(self._on_task_done) - return task + # otherwise if the task completes with an exception then it will cancel + # the current task too early. 
gh-128550, gh-128588 + self._tasks.add(task) + task.add_done_callback(self._on_task_done) + try: + return task + finally: + # gh-128552: prevent a refcycle of + # task.exception().__traceback__->TaskGroup.create_task->task + del task # Since Python 3.8 Tasks propagate all exceptions correctly, # except for KeyboardInterrupt and SystemExit which are @@ -225,6 +231,8 @@ def _abort(self): def _on_task_done(self, task): self._tasks.discard(task) + futures.future_discard_from_awaited_by(task, self._parent_task) + if self._on_completed_fut is not None and not self._tasks: if not self._on_completed_fut.done(): self._on_completed_fut.set_result(True) diff --git a/Lib/asyncio/tasks.py b/Lib/asyncio/tasks.py index 2112dd4b99d17f..a25854cc4bd69e 100644 --- a/Lib/asyncio/tasks.py +++ b/Lib/asyncio/tasks.py @@ -322,6 +322,7 @@ def __step_run_and_handle_result(self, exc): self._loop.call_soon( self.__step, new_exc, context=self._context) else: + futures.future_add_to_awaited_by(result, self) result._asyncio_future_blocking = False result.add_done_callback( self.__wakeup, context=self._context) @@ -356,6 +357,7 @@ def __step_run_and_handle_result(self, exc): self = None # Needed to break cycles when an exception occurs. def __wakeup(self, future): + futures.future_discard_from_awaited_by(future, self) try: future.result() except BaseException as exc: @@ -502,6 +504,7 @@ async def _wait(fs, timeout, return_when, loop): if timeout is not None: timeout_handle = loop.call_later(timeout, _release_waiter, waiter) counter = len(fs) + cur_task = current_task() def _on_completion(f): nonlocal counter @@ -514,9 +517,11 @@ def _on_completion(f): timeout_handle.cancel() if not waiter.done(): waiter.set_result(None) + futures.future_discard_from_awaited_by(f, cur_task) for f in fs: f.add_done_callback(_on_completion) + futures.future_add_to_awaited_by(f, cur_task) try: await waiter @@ -802,10 +807,19 @@ def gather(*coros_or_futures, return_exceptions=False): outer.set_result([]) return outer - def _done_callback(fut): + loop = events._get_running_loop() + if loop is not None: + cur_task = current_task(loop) + else: + cur_task = None + + def _done_callback(fut, cur_task=cur_task): nonlocal nfinished nfinished += 1 + if cur_task is not None: + futures.future_discard_from_awaited_by(fut, cur_task) + if outer is None or outer.done(): if not fut.cancelled(): # Mark exception retrieved. @@ -862,7 +876,6 @@ def _done_callback(fut): nfuts = 0 nfinished = 0 done_futs = [] - loop = None outer = None # bpo-46672 for arg in coros_or_futures: if arg not in arg_to_fut: @@ -875,12 +888,13 @@ def _done_callback(fut): # can't control it, disable the "destroy pending task" # warning. fut._log_destroy_pending = False - nfuts += 1 arg_to_fut[arg] = fut if fut.done(): done_futs.append(fut) else: + if cur_task is not None: + futures.future_add_to_awaited_by(fut, cur_task) fut.add_done_callback(_done_callback) else: @@ -940,7 +954,15 @@ def shield(arg): loop = futures._get_loop(inner) outer = loop.create_future() - def _inner_done_callback(inner): + if loop is not None and (cur_task := current_task(loop)) is not None: + futures.future_add_to_awaited_by(inner, cur_task) + else: + cur_task = None + + def _inner_done_callback(inner, cur_task=cur_task): + if cur_task is not None: + futures.future_discard_from_awaited_by(inner, cur_task) + if outer.cancelled(): if not inner.cancelled(): # Mark inner's result as retrieved. 
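The future_add_to_awaited_by / future_discard_from_awaited_by bookkeeping threaded through taskgroups.py and tasks.py above is what lets the new call-graph helpers (capture_call_graph, format_call_graph, print_call_graph) reconstruct which task awaits which future. A minimal usage sketch follows; it assumes the helpers are importable from asyncio.graph (the module comment only states the intent to re-export them at the top-level asyncio namespace, so the exact import path is an assumption), and the task name is purely illustrative.

import asyncio
from asyncio.graph import capture_call_graph, print_call_graph  # assumed module path

async def child():
    # Print the async call graph for the currently running task.
    print_call_graph()
    # Or capture it programmatically; a positive limit keeps the entries
    # closest to the invocation point in each call stack.
    graph = capture_call_graph(limit=2)
    if graph is not None:
        print(len(graph.call_stack), len(graph.awaited_by))

async def main():
    async with asyncio.TaskGroup() as tg:
        tg.create_task(child(), name="worker")  # name is illustrative

asyncio.run(main())

Because TaskGroup.create_task now registers the spawning task via future_add_to_awaited_by, the captured graph's awaited_by tuple should include the parent task that entered the TaskGroup.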
diff --git a/Lib/asyncio/timeouts.py b/Lib/asyncio/timeouts.py index e6f5100691d362..09342dc7c1310b 100644 --- a/Lib/asyncio/timeouts.py +++ b/Lib/asyncio/timeouts.py @@ -1,7 +1,6 @@ import enum from types import TracebackType -from typing import final, Optional, Type from . import events from . import exceptions @@ -23,14 +22,13 @@ class _State(enum.Enum): EXITED = "finished" -@final class Timeout: """Asynchronous context manager for cancelling overdue coroutines. Use `timeout()` or `timeout_at()` rather than instantiating this class directly. """ - def __init__(self, when: Optional[float]) -> None: + def __init__(self, when: float | None) -> None: """Schedule a timeout that will trigger at a given loop time. - If `when` is `None`, the timeout will never trigger. @@ -39,15 +37,15 @@ def __init__(self, when: Optional[float]) -> None: """ self._state = _State.CREATED - self._timeout_handler: Optional[events.TimerHandle] = None - self._task: Optional[tasks.Task] = None + self._timeout_handler: events.TimerHandle | None = None + self._task: tasks.Task | None = None self._when = when - def when(self) -> Optional[float]: + def when(self) -> float | None: """Return the current deadline.""" return self._when - def reschedule(self, when: Optional[float]) -> None: + def reschedule(self, when: float | None) -> None: """Reschedule the timeout.""" if self._state is not _State.ENTERED: if self._state is _State.CREATED: @@ -96,10 +94,10 @@ async def __aenter__(self) -> "Timeout": async def __aexit__( self, - exc_type: Optional[Type[BaseException]], - exc_val: Optional[BaseException], - exc_tb: Optional[TracebackType], - ) -> Optional[bool]: + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: assert self._state in (_State.ENTERED, _State.EXPIRING) if self._timeout_handler is not None: @@ -142,7 +140,7 @@ def _insert_timeout_error(exc_val: BaseException) -> None: exc_val = exc_val.__context__ -def timeout(delay: Optional[float]) -> Timeout: +def timeout(delay: float | None) -> Timeout: """Timeout async context manager. Useful in cases when you want to apply timeout logic around block @@ -162,7 +160,7 @@ def timeout(delay: Optional[float]) -> Timeout: return Timeout(loop.time() + delay if delay is not None else None) -def timeout_at(when: Optional[float]) -> Timeout: +def timeout_at(when: float | None) -> Timeout: """Schedule the timeout at absolute time. 
Like timeout() but argument gives absolute time in the same clock system diff --git a/Lib/base64.py b/Lib/base64.py index 61be4fb856e92c..5d78cc09f40cd3 100644 --- a/Lib/base64.py +++ b/Lib/base64.py @@ -4,7 +4,6 @@ # Modified 30-Dec-2003 by Barry Warsaw to add full RFC 3548 support # Modified 22-May-2007 by Guido van Rossum to use bytes everywhere -import re import struct import binascii @@ -284,7 +283,7 @@ def b16decode(s, casefold=False): s = _bytes_from_decode_data(s) if casefold: s = s.upper() - if re.search(b'[^0-9A-F]', s): + if s.translate(None, delete=b'0123456789ABCDEF'): raise binascii.Error('Non-base16 digit found') return binascii.unhexlify(s) diff --git a/Lib/bdb.py b/Lib/bdb.py index 81bba8a130f97c..a741628e32a981 100644 --- a/Lib/bdb.py +++ b/Lib/bdb.py @@ -4,6 +4,7 @@ import sys import os import weakref +from contextlib import contextmanager from inspect import CO_GENERATOR, CO_COROUTINE, CO_ASYNC_GENERATOR __all__ = ["BdbQuit", "Bdb", "Breakpoint"] @@ -65,6 +66,12 @@ def reset(self): self.botframe = None self._set_stopinfo(None, None) + @contextmanager + def set_enterframe(self, frame): + self.enterframe = frame + yield + self.enterframe = None + def trace_dispatch(self, frame, event, arg): """Dispatch a trace function for debugged frames based on the event. @@ -90,28 +97,27 @@ def trace_dispatch(self, frame, event, arg): The arg parameter depends on the previous event. """ - self.enterframe = frame - - if self.quitting: - return # None - if event == 'line': - return self.dispatch_line(frame) - if event == 'call': - return self.dispatch_call(frame, arg) - if event == 'return': - return self.dispatch_return(frame, arg) - if event == 'exception': - return self.dispatch_exception(frame, arg) - if event == 'c_call': - return self.trace_dispatch - if event == 'c_exception': - return self.trace_dispatch - if event == 'c_return': + with self.set_enterframe(frame): + if self.quitting: + return # None + if event == 'line': + return self.dispatch_line(frame) + if event == 'call': + return self.dispatch_call(frame, arg) + if event == 'return': + return self.dispatch_return(frame, arg) + if event == 'exception': + return self.dispatch_exception(frame, arg) + if event == 'c_call': + return self.trace_dispatch + if event == 'c_exception': + return self.trace_dispatch + if event == 'c_return': + return self.trace_dispatch + if event == 'opcode': + return self.dispatch_opcode(frame, arg) + print('bdb.Bdb.dispatch: unknown debugging event:', repr(event)) return self.trace_dispatch - if event == 'opcode': - return self.dispatch_opcode(frame, arg) - print('bdb.Bdb.dispatch: unknown debugging event:', repr(event)) - return self.trace_dispatch def dispatch_line(self, frame): """Invoke user function and return trace function for line event. 
@@ -395,15 +401,15 @@ def set_trace(self, frame=None): if frame is None: frame = sys._getframe().f_back self.reset() - self.enterframe = frame - while frame: - frame.f_trace = self.trace_dispatch - self.botframe = frame - self.frame_trace_lines_opcodes[frame] = (frame.f_trace_lines, frame.f_trace_opcodes) - # We need f_trace_lines == True for the debugger to work - frame.f_trace_lines = True - frame = frame.f_back - self.set_stepinstr() + with self.set_enterframe(frame): + while frame: + frame.f_trace = self.trace_dispatch + self.botframe = frame + self.frame_trace_lines_opcodes[frame] = (frame.f_trace_lines, frame.f_trace_opcodes) + # We need f_trace_lines == True for the debugger to work + frame.f_trace_lines = True + frame = frame.f_back + self.set_stepinstr() sys.settrace(self.trace_dispatch) def set_continue(self): diff --git a/Lib/configparser.py b/Lib/configparser.py index 420dce77c234e1..9dc4fa515cfcbe 100644 --- a/Lib/configparser.py +++ b/Lib/configparser.py @@ -1105,11 +1105,7 @@ def _handle_continuation_line(self, st, line, fpname): def _handle_rest(self, st, line, fpname): # a section header or option header? if self._allow_unnamed_section and st.cursect is None: - st.sectname = UNNAMED_SECTION - st.cursect = self._dict() - self._sections[st.sectname] = st.cursect - self._proxies[st.sectname] = SectionProxy(self, st.sectname) - st.elements_added.add(st.sectname) + self._handle_header(st, UNNAMED_SECTION, fpname) st.indent_level = st.cur_indent_level # is it a section header? @@ -1118,10 +1114,10 @@ def _handle_rest(self, st, line, fpname): if not mo and st.cursect is None: raise MissingSectionHeaderError(fpname, st.lineno, line) - self._handle_header(st, mo, fpname) if mo else self._handle_option(st, line, fpname) + self._handle_header(st, mo.group('header'), fpname) if mo else self._handle_option(st, line, fpname) - def _handle_header(self, st, mo, fpname): - st.sectname = mo.group('header') + def _handle_header(self, st, sectname, fpname): + st.sectname = sectname if st.sectname in self._sections: if self._strict and st.sectname in st.elements_added: raise DuplicateSectionError(st.sectname, fpname, diff --git a/Lib/csv.py b/Lib/csv.py index cd202659873811..0a627ba7a512fa 100644 --- a/Lib/csv.py +++ b/Lib/csv.py @@ -63,7 +63,6 @@ class excel: written as two quotes """ -import re import types from _csv import Error, writer, reader, register_dialect, \ unregister_dialect, get_dialect, list_dialects, \ @@ -281,6 +280,7 @@ def _guess_quote_and_delimiter(self, data, delimiters): If there is no quotechar the delimiter can't be determined this way. 
""" + import re matches = [] for restr in (r'(?P[^\w\n"\'])(?P ?)(?P["\']).*?(?P=quote)(?P=delim)', # ,".*?", diff --git a/Lib/doctest.py b/Lib/doctest.py index bb281fc483c41c..e02e73ed722f7e 100644 --- a/Lib/doctest.py +++ b/Lib/doctest.py @@ -1558,7 +1558,7 @@ def out(s): save_displayhook = sys.displayhook sys.displayhook = sys.__displayhook__ saved_can_colorize = _colorize.can_colorize - _colorize.can_colorize = lambda: False + _colorize.can_colorize = lambda *args, **kwargs: False color_variables = {"PYTHON_COLORS": None, "FORCE_COLOR": None} for key in color_variables: color_variables[key] = os.environ.pop(key, None) diff --git a/Lib/email/_header_value_parser.py b/Lib/email/_header_value_parser.py index ec2215a5e5f33c..3d845c09d415f6 100644 --- a/Lib/email/_header_value_parser.py +++ b/Lib/email/_header_value_parser.py @@ -95,8 +95,16 @@ NLSET = {'\n', '\r'} SPECIALSNL = SPECIALS | NLSET + +def make_quoted_pairs(value): + """Escape dquote and backslash for use within a quoted-string.""" + return str(value).replace('\\', '\\\\').replace('"', '\\"') + + def quote_string(value): - return '"'+str(value).replace('\\', '\\\\').replace('"', r'\"')+'"' + escaped = make_quoted_pairs(value) + return f'"{escaped}"' + # Match a RFC 2047 word, looks like =?utf-8?q?someword?= rfc2047_matcher = re.compile(r''' @@ -2905,6 +2913,15 @@ def _refold_parse_tree(parse_tree, *, policy): if not hasattr(part, 'encode'): # It's not a terminal, try folding the subparts. newparts = list(part) + if part.token_type == 'bare-quoted-string': + # To fold a quoted string we need to create a list of terminal + # tokens that will render the leading and trailing quotes + # and use quoted pairs in the value as appropriate. + newparts = ( + [ValueTerminal('"', 'ptext')] + + [ValueTerminal(make_quoted_pairs(p), 'ptext') + for p in newparts] + + [ValueTerminal('"', 'ptext')]) if not part.as_ew_allowed: wrap_as_ew_blocked += 1 newparts.append(end_ew_not_allowed) diff --git a/Lib/gettext.py b/Lib/gettext.py index a0d81cf846a05c..4c1f9427459b14 100644 --- a/Lib/gettext.py +++ b/Lib/gettext.py @@ -48,7 +48,6 @@ import operator import os -import re import sys @@ -70,22 +69,26 @@ # https://www.gnu.org/software/gettext/manual/gettext.html#Plural-forms # http://git.savannah.gnu.org/cgit/gettext.git/tree/gettext-runtime/intl/plural.y -_token_pattern = re.compile(r""" - (?P[ \t]+) | # spaces and horizontal tabs - (?P[0-9]+\b) | # decimal integer - (?Pn\b) | # only n is allowed - (?P[()]) | - (?P[-*/%+?:]|[>, - # <=, >=, ==, !=, &&, ||, - # ? : - # unary and bitwise ops - # not allowed - (?P\w+|.) # invalid token - """, re.VERBOSE|re.DOTALL) - +_token_pattern = None def _tokenize(plural): - for mo in re.finditer(_token_pattern, plural): + global _token_pattern + if _token_pattern is None: + import re + _token_pattern = re.compile(r""" + (?P[ \t]+) | # spaces and horizontal tabs + (?P[0-9]+\b) | # decimal integer + (?Pn\b) | # only n is allowed + (?P[()]) | + (?P[-*/%+?:]|[>, + # <=, >=, ==, !=, &&, ||, + # ? : + # unary and bitwise ops + # not allowed + (?P\w+|.) 
# invalid token + """, re.VERBOSE|re.DOTALL) + + for mo in _token_pattern.finditer(plural): kind = mo.lastgroup if kind == 'WHITESPACES': continue diff --git a/Lib/http/__init__.py b/Lib/http/__init__.py index 9f278289420713..691b4a9a367bd0 100644 --- a/Lib/http/__init__.py +++ b/Lib/http/__init__.py @@ -190,7 +190,7 @@ class HTTPMethod: Methods from the following RFCs are all observed: - * RFF 9110: HTTP Semantics, obsoletes 7231, which obsoleted 2616 + * RFC 9110: HTTP Semantics, obsoletes 7231, which obsoleted 2616 * RFC 5789: PATCH Method for HTTP """ def __new__(cls, value, description): diff --git a/Lib/http/client.py b/Lib/http/client.py index fab90a0ba4eb83..33a858d34ae1ba 100644 --- a/Lib/http/client.py +++ b/Lib/http/client.py @@ -472,7 +472,7 @@ def read(self, amt=None): if self.chunked: return self._read_chunked(amt) - if amt is not None: + if amt is not None and amt >= 0: if self.length is not None and amt > self.length: # clip the read to the "end of response" amt = self.length @@ -590,6 +590,8 @@ def _get_chunk_left(self): def _read_chunked(self, amt=None): assert self.chunked != _UNKNOWN + if amt is not None and amt < 0: + amt = None value = [] try: while (chunk_left := self._get_chunk_left()) is not None: diff --git a/Lib/http/cookies.py b/Lib/http/cookies.py index 23d5461f86fc23..694b1b09a0567c 100644 --- a/Lib/http/cookies.py +++ b/Lib/http/cookies.py @@ -264,11 +264,12 @@ class Morsel(dict): "httponly" : "HttpOnly", "version" : "Version", "samesite" : "SameSite", + "partitioned": "Partitioned", } _reserved_defaults = dict.fromkeys(_reserved, "") - _flags = {'secure', 'httponly'} + _flags = {'secure', 'httponly', 'partitioned'} def __init__(self): # Set defaults diff --git a/Lib/idlelib/idle_test/test_configdialog.py b/Lib/idlelib/idle_test/test_configdialog.py index 5099d093382445..2773ed7ce614b5 100644 --- a/Lib/idlelib/idle_test/test_configdialog.py +++ b/Lib/idlelib/idle_test/test_configdialog.py @@ -98,8 +98,8 @@ def test_click_help(self): dialog.buttons['Help'].invoke() title, contents = view.kwds['title'], view.kwds['contents'] self.assertEqual(title, 'Help for IDLE preferences') - self.assertTrue(contents.startswith('When you click') and - contents.endswith('a different name.\n')) + self.assertStartsWith(contents, 'When you click') + self.assertEndsWith(contents,'a different name.\n') class FontPageTest(unittest.TestCase): diff --git a/Lib/idlelib/idle_test/test_debugger.py b/Lib/idlelib/idle_test/test_debugger.py index d1c9638dd5d711..9ca3b332648b31 100644 --- a/Lib/idlelib/idle_test/test_debugger.py +++ b/Lib/idlelib/idle_test/test_debugger.py @@ -256,7 +256,7 @@ def test_init(self): flist = None master_window = self.root sv = debugger.StackViewer(master_window, flist, gui) - self.assertTrue(hasattr(sv, 'stack')) + self.assertHasAttr(sv, 'stack') def test_load_stack(self): # Test the .load_stack() method against a fixed test stack. 
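The http.cookies hunk above registers "partitioned" both as a reserved attribute (rendered as "Partitioned") and as a boolean flag alongside "secure" and "httponly". A minimal sketch of how that surfaces through SimpleCookie; the cookie name and values are illustrative only.

from http.cookies import SimpleCookie

cookie = SimpleCookie()
cookie["session"] = "abc123"             # name/value are illustrative
cookie["session"]["secure"] = True
cookie["session"]["samesite"] = "None"
cookie["session"]["partitioned"] = True  # new flag, rendered bare like Secure
print(cookie.output())
# Expected output (attribute order follows Morsel's reserved-key order):
# Set-Cookie: session=abc123; Secure; SameSite=None; Partitioned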
diff --git a/Lib/idlelib/idle_test/test_grep.py b/Lib/idlelib/idle_test/test_grep.py index a0b5b69171879c..d67dba76911fcf 100644 --- a/Lib/idlelib/idle_test/test_grep.py +++ b/Lib/idlelib/idle_test/test_grep.py @@ -143,7 +143,7 @@ def test_found(self): self.assertIn(pat, lines[0]) self.assertIn('py: 1:', lines[1]) # line number 1 self.assertIn('2', lines[3]) # hits found 2 - self.assertTrue(lines[4].startswith('(Hint:')) + self.assertStartsWith(lines[4], '(Hint:') class Default_commandTest(unittest.TestCase): diff --git a/Lib/idlelib/idle_test/test_multicall.py b/Lib/idlelib/idle_test/test_multicall.py index b3a3bfb88f9c31..67f28db6b0875c 100644 --- a/Lib/idlelib/idle_test/test_multicall.py +++ b/Lib/idlelib/idle_test/test_multicall.py @@ -27,7 +27,7 @@ def tearDownClass(cls): def test_creator(self): mc = self.mc self.assertIs(multicall._multicall_dict[Text], mc) - self.assertTrue(issubclass(mc, Text)) + self.assertIsSubclass(mc, Text) mc2 = multicall.MultiCallCreator(Text) self.assertIs(mc, mc2) diff --git a/Lib/idlelib/idle_test/test_query.py b/Lib/idlelib/idle_test/test_query.py index bb12b2b08652d5..a6ef858a8c954a 100644 --- a/Lib/idlelib/idle_test/test_query.py +++ b/Lib/idlelib/idle_test/test_query.py @@ -134,10 +134,10 @@ def test_c_source_name(self): def test_good_module_name(self): dialog = self.Dummy_ModuleName('idlelib') - self.assertTrue(dialog.entry_ok().endswith('__init__.py')) + self.assertEndsWith(dialog.entry_ok(), '__init__.py') self.assertEqual(dialog.entry_error['text'], '') dialog = self.Dummy_ModuleName('idlelib.idle') - self.assertTrue(dialog.entry_ok().endswith('idle.py')) + self.assertEndsWith(dialog.entry_ok(), 'idle.py') self.assertEqual(dialog.entry_error['text'], '') @@ -389,7 +389,7 @@ def test_click_module_name(self): self.assertEqual(dialog.text0, 'idlelib') self.assertEqual(dialog.entry.get(), 'idlelib') dialog.button_ok.invoke() - self.assertTrue(dialog.result.endswith('__init__.py')) + self.assertEndsWith(dialog.result, '__init__.py') root.destroy() diff --git a/Lib/idlelib/idle_test/test_redirector.py b/Lib/idlelib/idle_test/test_redirector.py index a97b3002afcf12..bd486d7da66010 100644 --- a/Lib/idlelib/idle_test/test_redirector.py +++ b/Lib/idlelib/idle_test/test_redirector.py @@ -34,7 +34,7 @@ def test_close(self): redir.register('insert', Func) redir.close() self.assertEqual(redir._operations, {}) - self.assertFalse(hasattr(self.text, 'widget')) + self.assertNotHasAttr(self.text, 'widget') class WidgetRedirectorTest(unittest.TestCase): diff --git a/Lib/idlelib/idle_test/test_sidebar.py b/Lib/idlelib/idle_test/test_sidebar.py index 605e7a892570d7..4157a4b4dcdd2a 100644 --- a/Lib/idlelib/idle_test/test_sidebar.py +++ b/Lib/idlelib/idle_test/test_sidebar.py @@ -725,7 +725,7 @@ def test_copy(self): text.tag_add('sel', f'{first_line}.0', 'end-1c') selected_text = text.get('sel.first', 'sel.last') - self.assertTrue(selected_text.startswith('if True:\n')) + self.assertStartsWith(selected_text, 'if True:\n') self.assertIn('\n1\n', selected_text) text.event_generate('<>') @@ -749,7 +749,7 @@ def test_copy_with_prompts(self): text.tag_add('sel', f'{first_line}.3', 'end-1c') selected_text = text.get('sel.first', 'sel.last') - self.assertTrue(selected_text.startswith('True:\n')) + self.assertStartsWith(selected_text, 'True:\n') selected_lines_text = text.get('sel.first linestart', 'sel.last') selected_lines = selected_lines_text.split('\n') diff --git a/Lib/imaplib.py b/Lib/imaplib.py index e576c29e67dc0a..db708580a0abf6 100644 --- a/Lib/imaplib.py +++ 
b/Lib/imaplib.py @@ -52,6 +52,9 @@ # search command can be quite large, so we now use 1M. _MAXLINE = 1000000 +# Data larger than this will be read in chunks, to prevent extreme +# overallocation. +_SAFE_BUF_SIZE = 1 << 20 # Commands @@ -315,7 +318,13 @@ def open(self, host='', port=IMAP4_PORT, timeout=None): def read(self, size): """Read 'size' bytes from remote.""" - return self.file.read(size) + cursize = min(size, _SAFE_BUF_SIZE) + data = self.file.read(cursize) + while cursize < size and len(data) == cursize: + delta = min(cursize, size - cursize) + data += self.file.read(delta) + cursize += delta + return data def readline(self): diff --git a/Lib/importlib/_bootstrap_external.py b/Lib/importlib/_bootstrap_external.py index fa36159711846f..8bcd741c446bd2 100644 --- a/Lib/importlib/_bootstrap_external.py +++ b/Lib/importlib/_bootstrap_external.py @@ -716,6 +716,12 @@ def _search_registry(cls, fullname): @classmethod def find_spec(cls, fullname, path=None, target=None): + _warnings.warn('importlib.machinery.WindowsRegistryFinder is ' + 'deprecated; use site configuration instead. ' + 'Future versions of Python may not enable this ' + 'finder by default.', + DeprecationWarning, stacklevel=2) + filepath = cls._search_registry(fullname) if filepath is None: return None @@ -1238,7 +1244,7 @@ def _path_importer_cache(cls, path): if path == '': try: path = _os.getcwd() - except FileNotFoundError: + except (FileNotFoundError, PermissionError): # Don't cache the failure as the cwd can easily change to # a valid directory later on. return None diff --git a/Lib/importlib/abc.py b/Lib/importlib/abc.py index eea6b38af6fa13..29f01f77eff4a0 100644 --- a/Lib/importlib/abc.py +++ b/Lib/importlib/abc.py @@ -70,6 +70,15 @@ class ResourceLoader(Loader): """ + def __init__(self): + import warnings + warnings.warn('importlib.abc.ResourceLoader is deprecated in ' + 'favour of supporting resource loading through ' + 'importlib.resources.abc.TraversableResources.', + DeprecationWarning, stacklevel=2) + super().__init__() + + @abc.abstractmethod def get_data(self, path): """Abstract method which when implemented should return the bytes for @@ -199,6 +208,10 @@ class SourceLoader(_bootstrap_external.SourceLoader, ResourceLoader, ExecutionLo def path_mtime(self, path): """Return the (int) modification time for the path (str).""" + import warnings + warnings.warn('SourceLoader.path_mtime is deprecated in favour of ' + 'SourceLoader.path_stats().', + DeprecationWarning, stacklevel=2) if self.path_stats.__func__ is SourceLoader.path_stats: raise OSError return int(self.path_stats(path)['mtime']) diff --git a/Lib/importlib/machinery.py b/Lib/importlib/machinery.py index 6e294d59bfdcb9..63d726445c3d96 100644 --- a/Lib/importlib/machinery.py +++ b/Lib/importlib/machinery.py @@ -3,9 +3,11 @@ from ._bootstrap import ModuleSpec from ._bootstrap import BuiltinImporter from ._bootstrap import FrozenImporter -from ._bootstrap_external import (SOURCE_SUFFIXES, DEBUG_BYTECODE_SUFFIXES, - OPTIMIZED_BYTECODE_SUFFIXES, BYTECODE_SUFFIXES, - EXTENSION_SUFFIXES) +from ._bootstrap_external import ( + SOURCE_SUFFIXES, BYTECODE_SUFFIXES, EXTENSION_SUFFIXES, + DEBUG_BYTECODE_SUFFIXES as _DEBUG_BYTECODE_SUFFIXES, + OPTIMIZED_BYTECODE_SUFFIXES as _OPTIMIZED_BYTECODE_SUFFIXES +) from ._bootstrap_external import WindowsRegistryFinder from ._bootstrap_external import PathFinder from ._bootstrap_external import FileFinder @@ -27,3 +29,22 @@ def all_suffixes(): 'NamespaceLoader', 'OPTIMIZED_BYTECODE_SUFFIXES', 'PathFinder', 'SOURCE_SUFFIXES', 
'SourceFileLoader', 'SourcelessFileLoader', 'WindowsRegistryFinder', 'all_suffixes'] + + +def __getattr__(name): + import warnings + + if name == 'DEBUG_BYTECODE_SUFFIXES': + warnings.warn('importlib.machinery.DEBUG_BYTECODE_SUFFIXES is ' + 'deprecated; use importlib.machinery.BYTECODE_SUFFIXES ' + 'instead.', + DeprecationWarning, stacklevel=2) + return _DEBUG_BYTECODE_SUFFIXES + elif name == 'OPTIMIZED_BYTECODE_SUFFIXES': + warnings.warn('importlib.machinery.OPTIMIZED_BYTECODE_SUFFIXES is ' + 'deprecated; use importlib.machinery.BYTECODE_SUFFIXES ' + 'instead.', + DeprecationWarning, stacklevel=2) + return _OPTIMIZED_BYTECODE_SUFFIXES + + raise AttributeError(f'module {__name__!r} has no attribute {name!r}') diff --git a/Lib/importlib/resources/__init__.py b/Lib/importlib/resources/__init__.py index ec4441c9116118..723c9f9eb33ce1 100644 --- a/Lib/importlib/resources/__init__.py +++ b/Lib/importlib/resources/__init__.py @@ -1,4 +1,11 @@ -"""Read resources contained within a package.""" +""" +Read resources contained within a package. + +This codebase is shared between importlib.resources in the stdlib +and importlib_resources in PyPI. See +https://github.com/python/importlib_metadata/wiki/Development-Methodology +for more detail. +""" from ._common import ( as_file, diff --git a/Lib/importlib/resources/_common.py b/Lib/importlib/resources/_common.py index c2c92254370f71..4e9014c45a056e 100644 --- a/Lib/importlib/resources/_common.py +++ b/Lib/importlib/resources/_common.py @@ -66,10 +66,10 @@ def get_resource_reader(package: types.ModuleType) -> Optional[ResourceReader]: # zipimport.zipimporter does not support weak references, resulting in a # TypeError. That seems terrible. spec = package.__spec__ - reader = getattr(spec.loader, 'get_resource_reader', None) # type: ignore + reader = getattr(spec.loader, 'get_resource_reader', None) # type: ignore[union-attr] if reader is None: return None - return reader(spec.name) # type: ignore + return reader(spec.name) # type: ignore[union-attr] @functools.singledispatch diff --git a/Lib/importlib/resources/readers.py b/Lib/importlib/resources/readers.py index ccc5abbeb4e56e..70fc7e2b9c0145 100644 --- a/Lib/importlib/resources/readers.py +++ b/Lib/importlib/resources/readers.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import collections import contextlib import itertools @@ -6,6 +8,7 @@ import re import warnings import zipfile +from collections.abc import Iterator from . import abc @@ -135,27 +138,31 @@ class NamespaceReader(abc.TraversableResources): def __init__(self, namespace_path): if 'NamespacePath' not in str(namespace_path): raise ValueError('Invalid path') - self.path = MultiplexedPath(*map(self._resolve, namespace_path)) + self.path = MultiplexedPath(*filter(bool, map(self._resolve, namespace_path))) @classmethod - def _resolve(cls, path_str) -> abc.Traversable: + def _resolve(cls, path_str) -> abc.Traversable | None: r""" Given an item from a namespace path, resolve it to a Traversable. path_str might be a directory on the filesystem or a path to a zipfile plus the path within the zipfile, e.g. ``/foo/bar`` or ``/foo/baz.zip/inner_dir`` or ``foo\baz.zip\inner_dir\sub``. + + path_str might also be a sentinel used by editable packages to + trigger other behaviors (see python/importlib_resources#311). + In that case, return None. 
""" - (dir,) = (cand for cand in cls._candidate_paths(path_str) if cand.is_dir()) - return dir + dirs = (cand for cand in cls._candidate_paths(path_str) if cand.is_dir()) + return next(dirs, None) @classmethod - def _candidate_paths(cls, path_str): + def _candidate_paths(cls, path_str: str) -> Iterator[abc.Traversable]: yield pathlib.Path(path_str) yield from cls._resolve_zip_path(path_str) @staticmethod - def _resolve_zip_path(path_str): + def _resolve_zip_path(path_str: str): for match in reversed(list(re.finditer(r'[\\/]', path_str))): with contextlib.suppress( FileNotFoundError, diff --git a/Lib/importlib/resources/simple.py b/Lib/importlib/resources/simple.py index 96f117fec62c10..2e75299b13aabf 100644 --- a/Lib/importlib/resources/simple.py +++ b/Lib/importlib/resources/simple.py @@ -77,7 +77,7 @@ class ResourceHandle(Traversable): def __init__(self, parent: ResourceContainer, name: str): self.parent = parent - self.name = name # type: ignore + self.name = name # type: ignore[misc] def is_file(self): return True diff --git a/Lib/inspect.py b/Lib/inspect.py index 5b7c4df8927c87..facad478103668 100644 --- a/Lib/inspect.py +++ b/Lib/inspect.py @@ -858,8 +858,7 @@ def getsourcefile(object): Return None if no way can be identified to get the source. """ filename = getfile(object) - all_bytecode_suffixes = importlib.machinery.DEBUG_BYTECODE_SUFFIXES[:] - all_bytecode_suffixes += importlib.machinery.OPTIMIZED_BYTECODE_SUFFIXES[:] + all_bytecode_suffixes = importlib.machinery.BYTECODE_SUFFIXES[:] if any(filename.endswith(s) for s in all_bytecode_suffixes): filename = (os.path.splitext(filename)[0] + importlib.machinery.SOURCE_SUFFIXES[0]) diff --git a/Lib/logging/handlers.py b/Lib/logging/handlers.py index 1cba64fd554100..017c9ab409b7bc 100644 --- a/Lib/logging/handlers.py +++ b/Lib/logging/handlers.py @@ -855,7 +855,7 @@ class SysLogHandler(logging.Handler): } def __init__(self, address=('localhost', SYSLOG_UDP_PORT), - facility=LOG_USER, socktype=None): + facility=LOG_USER, socktype=None, timeout=None): """ Initialize a handler. @@ -872,6 +872,7 @@ def __init__(self, address=('localhost', SYSLOG_UDP_PORT), self.address = address self.facility = facility self.socktype = socktype + self.timeout = timeout self.socket = None self.createSocket() @@ -933,6 +934,8 @@ def createSocket(self): err = sock = None try: sock = socket.socket(af, socktype, proto) + if self.timeout: + sock.settimeout(self.timeout) if socktype == socket.SOCK_STREAM: sock.connect(sa) break diff --git a/Lib/multiprocessing/connection.py b/Lib/multiprocessing/connection.py index 710aba9685efda..d429212d447380 100644 --- a/Lib/multiprocessing/connection.py +++ b/Lib/multiprocessing/connection.py @@ -853,7 +853,7 @@ def PipeClient(address): _LEGACY_LENGTHS = (_MD5ONLY_MESSAGE_LENGTH, _MD5_DIGEST_LEN) -def _get_digest_name_and_payload(message: bytes) -> (str, bytes): +def _get_digest_name_and_payload(message): # type: (bytes) -> tuple[str, bytes] """Returns a digest name and the payload for a response hash. 
If a legacy protocol is detected based on the message length diff --git a/Lib/multiprocessing/forkserver.py b/Lib/multiprocessing/forkserver.py index df9b9be9d1898b..681af2610e9b37 100644 --- a/Lib/multiprocessing/forkserver.py +++ b/Lib/multiprocessing/forkserver.py @@ -382,13 +382,14 @@ def _serve_one(child_r, fds, unused_fds, handlers): # def read_signed(fd): - data = b'' - length = SIGNED_STRUCT.size - while len(data) < length: - s = os.read(fd, length - len(data)) - if not s: + data = bytearray(SIGNED_STRUCT.size) + unread = memoryview(data) + while unread: + count = os.readinto(fd, unread) + if count == 0: raise EOFError('unexpected EOF') - data += s + unread = unread[count:] + return SIGNED_STRUCT.unpack(data)[0] def write_signed(fd, n): diff --git a/Lib/multiprocessing/synchronize.py b/Lib/multiprocessing/synchronize.py index 4f72373c951abc..edd6c2543a7435 100644 --- a/Lib/multiprocessing/synchronize.py +++ b/Lib/multiprocessing/synchronize.py @@ -359,7 +359,7 @@ def wait(self, timeout=None): return True return False - def __repr__(self) -> str: + def __repr__(self): set_status = 'set' if self.is_set() else 'unset' return f"<{type(self).__qualname__} at {id(self):#x} {set_status}>" # diff --git a/Lib/opcode.py b/Lib/opcode.py index 974f4d35e2a524..4ee0d64026bd0a 100644 --- a/Lib/opcode.py +++ b/Lib/opcode.py @@ -17,8 +17,9 @@ EXTENDED_ARG = opmap['EXTENDED_ARG'] opname = ['<%r>' % (op,) for op in range(max(opmap.values()) + 1)] -for op, i in opmap.items(): - opname[i] = op +for m in (opmap, _specialized_opmap): + for op, i in m.items(): + opname[i] = op cmp_op = ('<', '<=', '==', '!=', '>', '>=') @@ -51,6 +52,7 @@ }, "BINARY_OP": { "counter": 1, + "descr": 4, }, "UNPACK_SEQUENCE": { "counter": 1, diff --git a/Lib/optparse.py b/Lib/optparse.py index cbe3451ced8bc3..38cf16d21efffa 100644 --- a/Lib/optparse.py +++ b/Lib/optparse.py @@ -74,7 +74,6 @@ """ import sys, os -import textwrap from gettext import gettext as _, ngettext @@ -252,6 +251,7 @@ def _format_text(self, text): Format a paragraph of free-form text for inclusion in the help output at the current indentation level. """ + import textwrap text_width = max(self.width - self.current_indent, 11) indent = " "*self.current_indent return textwrap.fill(text, @@ -308,6 +308,7 @@ def format_option(self, option): indent_first = 0 result.append(opts) if option.help: + import textwrap help_text = self.expand_default(option) help_lines = textwrap.wrap(help_text, self.help_width) result.append("%*s%s\n" % (indent_first, "", help_lines[0])) diff --git a/Lib/pathlib/_abc.py b/Lib/pathlib/_abc.py index 7de2bb066f8f99..e498dc78e83b5e 100644 --- a/Lib/pathlib/_abc.py +++ b/Lib/pathlib/_abc.py @@ -7,11 +7,12 @@ it's developed alongside pathlib. If it finds success and maturity as a PyPI package, it could become a public part of the standard library. -Two base classes are defined here -- PurePathBase and PathBase -- that -resemble pathlib's PurePath and Path respectively. +Three base classes are defined here -- JoinablePath, ReadablePath and +WritablePath. """ import functools +import io import operator import posixpath from errno import EINVAL @@ -41,6 +42,40 @@ def _explode_path(path): return path, names +def magic_open(path, mode='r', buffering=-1, encoding=None, errors=None, + newline=None): + """ + Open the file pointed to by this path and return a file object, as + the built-in open() function does. 
+ """ + try: + return io.open(path, mode, buffering, encoding, errors, newline) + except TypeError: + pass + cls = type(path) + text = 'b' not in mode + mode = ''.join(sorted(c for c in mode if c not in 'bt')) + if text: + try: + attr = getattr(cls, f'__open_{mode}__') + except AttributeError: + pass + else: + return attr(path, buffering, encoding, errors, newline) + + try: + attr = getattr(cls, f'__open_{mode}b__') + except AttributeError: + pass + else: + stream = attr(path, buffering) + if text: + stream = io.TextIOWrapper(stream, encoding, errors, newline) + return stream + + raise TypeError(f"{cls.__name__} can't be opened with mode {mode!r}") + + class PathGlobber(_GlobberBase): """ Class providing shell-style globbing for path objects. @@ -56,33 +91,17 @@ def concat_path(path, text): return path.with_segments(str(path) + text) -class CopyWorker: +class CopyReader: """ - Class that implements copying between path objects. An instance of this - class is available from the PathBase.copy property; it's made callable so - that PathBase.copy() can be treated as a method. - - The target path's CopyWorker drives the process from its _create() method. - Files and directories are exchanged by calling methods on the source and - target paths, and metadata is exchanged by calling - source.copy._read_metadata() and target.copy._write_metadata(). + Class that implements the "read" part of copying between path objects. + An instance of this class is available from the ReadablePath._copy_reader + property. """ __slots__ = ('_path',) def __init__(self, path): self._path = path - def __call__(self, target, follow_symlinks=True, dirs_exist_ok=False, - preserve_metadata=False): - """ - Recursively copy this file or directory tree to the given destination. - """ - if not isinstance(target, PathBase): - target = self._path.with_segments(target) - - # Delegate to the target path's CopyWorker object. - return target.copy._create(self._path, follow_symlinks, dirs_exist_ok, preserve_metadata) - _readable_metakeys = frozenset() def _read_metadata(self, metakeys, *, follow_symlinks=True): @@ -91,6 +110,18 @@ def _read_metadata(self, metakeys, *, follow_symlinks=True): """ raise NotImplementedError + +class CopyWriter: + """ + Class that implements the "write" part of copying between path objects. An + instance of this class is available from the WritablePath._copy_writer + property. 
+ """ + __slots__ = ('_path',) + + def __init__(self, path): + self._path = path + _writable_metakeys = frozenset() def _write_metadata(self, metadata, *, follow_symlinks=True): @@ -102,7 +133,7 @@ def _write_metadata(self, metadata, *, follow_symlinks=True): def _create(self, source, follow_symlinks, dirs_exist_ok, preserve_metadata): self._ensure_distinct_path(source) if preserve_metadata: - metakeys = self._writable_metakeys & source.copy._readable_metakeys + metakeys = self._writable_metakeys & source._copy_reader._readable_metakeys else: metakeys = None if not follow_symlinks and source.is_symlink(): @@ -120,22 +151,22 @@ def _create_dir(self, source, metakeys, follow_symlinks, dirs_exist_ok): for src in children: dst = self._path.joinpath(src.name) if not follow_symlinks and src.is_symlink(): - dst.copy._create_symlink(src, metakeys) + dst._copy_writer._create_symlink(src, metakeys) elif src.is_dir(): - dst.copy._create_dir(src, metakeys, follow_symlinks, dirs_exist_ok) + dst._copy_writer._create_dir(src, metakeys, follow_symlinks, dirs_exist_ok) else: - dst.copy._create_file(src, metakeys) + dst._copy_writer._create_file(src, metakeys) if metakeys: - metadata = source.copy._read_metadata(metakeys) + metadata = source._copy_reader._read_metadata(metakeys) if metadata: self._write_metadata(metadata) def _create_file(self, source, metakeys): """Copy the given file to our path.""" self._ensure_different_file(source) - with source.open('rb') as source_f: + with magic_open(source, 'rb') as source_f: try: - with self._path.open('wb') as target_f: + with magic_open(self._path, 'wb') as target_f: copyfileobj(source_f, target_f) except IsADirectoryError as e: if not self._path.exists(): @@ -144,7 +175,7 @@ def _create_file(self, source, metakeys): f'Directory does not exist: {self._path}') from e raise if metakeys: - metadata = source.copy._read_metadata(metakeys) + metadata = source._copy_reader._read_metadata(metakeys) if metadata: self._write_metadata(metadata) @@ -152,7 +183,7 @@ def _create_symlink(self, source, metakeys): """Copy the given symbolic link to our path.""" self._path.symlink_to(source.readlink()) if metakeys: - metadata = source.copy._read_metadata(metakeys, follow_symlinks=False) + metadata = source._copy_reader._read_metadata(metakeys, follow_symlinks=False) if metadata: self._write_metadata(metadata, follow_symlinks=False) @@ -182,7 +213,7 @@ def _ensure_distinct_path(self, source): raise err -class PurePathBase: +class JoinablePath: """Base class for pure path objects. This class *does not* provide several magic methods that are defined in @@ -327,39 +358,12 @@ def parents(self): parent = split(path)[0] return tuple(parents) - def match(self, path_pattern, *, case_sensitive=None): - """ - Return True if this path matches the given pattern. If the pattern is - relative, matching is done from the right; otherwise, the entire path - is matched. The recursive wildcard '**' is *not* supported by this - method. 
- """ - if not isinstance(path_pattern, PurePathBase): - path_pattern = self.with_segments(path_pattern) - if case_sensitive is None: - case_sensitive = _is_case_sensitive(self.parser) - sep = path_pattern.parser.sep - path_parts = self.parts[::-1] - pattern_parts = path_pattern.parts[::-1] - if not pattern_parts: - raise ValueError("empty pattern") - if len(path_parts) < len(pattern_parts): - return False - if len(path_parts) > len(pattern_parts) and path_pattern.anchor: - return False - globber = PathGlobber(sep, case_sensitive) - for path_part, pattern_part in zip(path_parts, pattern_parts): - match = globber.compile(pattern_part) - if match(path_part) is None: - return False - return True - def full_match(self, pattern, *, case_sensitive=None): """ Return True if this path matches the given glob-style pattern. The pattern is matched against the entire path. """ - if not isinstance(pattern, PurePathBase): + if not isinstance(pattern, JoinablePath): pattern = self.with_segments(pattern) if case_sensitive is None: case_sensitive = _is_case_sensitive(self.parser) @@ -369,7 +373,7 @@ def full_match(self, pattern, *, case_sensitive=None): -class PathBase(PurePathBase): +class ReadablePath(JoinablePath): """Base class for concrete path objects. This class provides dummy implementations for many methods that derived @@ -412,11 +416,10 @@ def is_symlink(self): """ raise NotImplementedError - def open(self, mode='r', buffering=-1, encoding=None, - errors=None, newline=None): + def __open_rb__(self, buffering=-1): """ - Open the file pointed to by this path and return a file object, as - the built-in open() function does. + Open the file pointed to by this path for reading in binary mode and + return a file object, like open(mode='rb'). """ raise NotImplementedError @@ -424,35 +427,16 @@ def read_bytes(self): """ Open the file in bytes mode, read it, and close the file. """ - with self.open(mode='rb', buffering=0) as f: + with magic_open(self, mode='rb', buffering=0) as f: return f.read() def read_text(self, encoding=None, errors=None, newline=None): """ Open the file in text mode, read it, and close the file. """ - with self.open(mode='r', encoding=encoding, errors=errors, newline=newline) as f: + with magic_open(self, mode='r', encoding=encoding, errors=errors, newline=newline) as f: return f.read() - def write_bytes(self, data): - """ - Open the file in bytes mode, write to it, and close the file. - """ - # type-check for the buffer interface before truncating the file - view = memoryview(data) - with self.open(mode='wb') as f: - return f.write(view) - - def write_text(self, data, encoding=None, errors=None, newline=None): - """ - Open the file in text mode, write to it, and close the file. - """ - if not isinstance(data, str): - raise TypeError('data must be str, not %s' % - data.__class__.__name__) - with self.open(mode='w', encoding=encoding, errors=errors, newline=newline) as f: - return f.write(data) - def _scandir(self): """Yield os.DirEntry-like objects of the directory contents. @@ -474,7 +458,7 @@ def glob(self, pattern, *, case_sensitive=None, recurse_symlinks=True): """Iterate over this subtree and yield all existing files (of any kind, including directories) matching the given relative pattern. 
""" - if not isinstance(pattern, PurePathBase): + if not isinstance(pattern, JoinablePath): pattern = self.with_segments(pattern) anchor, parts = _explode_path(pattern) if anchor: @@ -496,7 +480,7 @@ def rglob(self, pattern, *, case_sensitive=None, recurse_symlinks=True): directories) matching the given relative pattern, anywhere in this subtree. """ - if not isinstance(pattern, PurePathBase): + if not isinstance(pattern, JoinablePath): pattern = self.with_segments(pattern) pattern = '**' / pattern return self.glob(pattern, case_sensitive=case_sensitive, recurse_symlinks=recurse_symlinks) @@ -543,20 +527,22 @@ def readlink(self): """ raise NotImplementedError - def symlink_to(self, target, target_is_directory=False): - """ - Make this path a symlink pointing to the target path. - Note the order of arguments (link, target) is the reverse of os.symlink. - """ - raise NotImplementedError + _copy_reader = property(CopyReader) - def mkdir(self, mode=0o777, parents=False, exist_ok=False): + def copy(self, target, follow_symlinks=True, dirs_exist_ok=False, + preserve_metadata=False): """ - Create a new directory at this given path. + Recursively copy this file or directory tree to the given destination. """ - raise NotImplementedError + if not hasattr(target, '_copy_writer'): + target = self.with_segments(target) - copy = property(CopyWorker, doc=CopyWorker.__call__.__doc__) + # Delegate to the target path's CopyWriter object. + try: + create = target._copy_writer._create + except AttributeError: + raise TypeError(f"Target is not writable: {target}") from None + return create(self, follow_symlinks, dirs_exist_ok, preserve_metadata) def copy_into(self, target_dir, *, follow_symlinks=True, dirs_exist_ok=False, preserve_metadata=False): @@ -566,10 +552,55 @@ def copy_into(self, target_dir, *, follow_symlinks=True, name = self.name if not name: raise ValueError(f"{self!r} has an empty name") - elif isinstance(target_dir, PathBase): + elif hasattr(target_dir, '_copy_writer'): target = target_dir / name else: target = self.with_segments(target_dir, name) return self.copy(target, follow_symlinks=follow_symlinks, dirs_exist_ok=dirs_exist_ok, preserve_metadata=preserve_metadata) + + +class WritablePath(JoinablePath): + __slots__ = () + + def symlink_to(self, target, target_is_directory=False): + """ + Make this path a symlink pointing to the target path. + Note the order of arguments (link, target) is the reverse of os.symlink. + """ + raise NotImplementedError + + def mkdir(self, mode=0o777, parents=False, exist_ok=False): + """ + Create a new directory at this given path. + """ + raise NotImplementedError + + def __open_wb__(self, buffering=-1): + """ + Open the file pointed to by this path for writing in binary mode and + return a file object, like open(mode='wb'). + """ + raise NotImplementedError + + def write_bytes(self, data): + """ + Open the file in bytes mode, write to it, and close the file. + """ + # type-check for the buffer interface before truncating the file + view = memoryview(data) + with magic_open(self, mode='wb') as f: + return f.write(view) + + def write_text(self, data, encoding=None, errors=None, newline=None): + """ + Open the file in text mode, write to it, and close the file. 
+ """ + if not isinstance(data, str): + raise TypeError('data must be str, not %s' % + data.__class__.__name__) + with magic_open(self, mode='w', encoding=encoding, errors=errors, newline=newline) as f: + return f.write(data) + + _copy_writer = property(CopyWriter) diff --git a/Lib/pathlib/_local.py b/Lib/pathlib/_local.py index 1da85ddea24376..b3ec934f7510de 100644 --- a/Lib/pathlib/_local.py +++ b/Lib/pathlib/_local.py @@ -20,7 +20,7 @@ grp = None from pathlib._os import copyfile -from pathlib._abc import CopyWorker, PurePathBase, PathBase +from pathlib._abc import CopyReader, CopyWriter, JoinablePath, ReadablePath, WritablePath __all__ = [ @@ -65,9 +65,10 @@ def __repr__(self): return "<{}.parents>".format(type(self._path).__name__) -class _LocalCopyWorker(CopyWorker): - """This object implements the Path.copy callable. Don't try to construct - it yourself.""" +class _LocalCopyReader(CopyReader): + """This object implements the "read" part of copying local paths. Don't + try to construct it yourself. + """ __slots__ = () _readable_metakeys = {'mode', 'times_ns'} @@ -75,7 +76,7 @@ class _LocalCopyWorker(CopyWorker): _readable_metakeys.add('flags') if hasattr(os, 'listxattr'): _readable_metakeys.add('xattrs') - _readable_metakeys = _writable_metakeys = frozenset(_readable_metakeys) + _readable_metakeys = frozenset(_readable_metakeys) def _read_metadata(self, metakeys, *, follow_symlinks=True): metadata = {} @@ -97,6 +98,15 @@ def _read_metadata(self, metakeys, *, follow_symlinks=True): raise return metadata + +class _LocalCopyWriter(CopyWriter): + """This object implements the "write" part of copying local paths. Don't + try to construct it yourself. + """ + __slots__ = () + + _writable_metakeys = _LocalCopyReader._readable_metakeys + def _write_metadata(self, metadata, *, follow_symlinks=True): def _nop(*args, ns=None, follow_symlinks=None): pass @@ -158,7 +168,7 @@ def _create_file(self, source, metakeys): try: source = os.fspath(source) except TypeError: - if not isinstance(source, PathBase): + if not isinstance(source, WritablePath): raise super()._create_file(source, metakeys) else: @@ -171,7 +181,7 @@ def _create_symlink(self, source, metakeys): """Copy the given symlink to the given target.""" self._path.symlink_to(source.readlink(), source.is_dir()) if metakeys: - metadata = source.copy._read_metadata(metakeys, follow_symlinks=False) + metadata = source._copy_reader._read_metadata(metakeys, follow_symlinks=False) if metadata: self._write_metadata(metadata, follow_symlinks=False) @@ -190,7 +200,7 @@ def _ensure_different_file(self, source): raise err -class PurePath(PurePathBase): +class PurePath(JoinablePath): """Base class for manipulating paths without I/O. PurePath represents a filesystem path and offers operations which @@ -646,7 +656,7 @@ def full_match(self, pattern, *, case_sensitive=None): Return True if this path matches the given glob-style pattern. The pattern is matched against the entire path. """ - if not isinstance(pattern, PurePathBase): + if not isinstance(pattern, PurePath): pattern = self.with_segments(pattern) if case_sensitive is None: case_sensitive = self.parser is posixpath @@ -658,6 +668,32 @@ def full_match(self, pattern, *, case_sensitive=None): globber = _StringGlobber(self.parser.sep, case_sensitive, recursive=True) return globber.compile(pattern)(path) is not None + def match(self, path_pattern, *, case_sensitive=None): + """ + Return True if this path matches the given pattern. 
If the pattern is + relative, matching is done from the right; otherwise, the entire path + is matched. The recursive wildcard '**' is *not* supported by this + method. + """ + if not isinstance(path_pattern, PurePath): + path_pattern = self.with_segments(path_pattern) + if case_sensitive is None: + case_sensitive = self.parser is posixpath + path_parts = self.parts[::-1] + pattern_parts = path_pattern.parts[::-1] + if not pattern_parts: + raise ValueError("empty pattern") + if len(path_parts) < len(pattern_parts): + return False + if len(path_parts) > len(pattern_parts) and path_pattern.anchor: + return False + globber = _StringGlobber(self.parser.sep, case_sensitive) + for path_part, pattern_part in zip(path_parts, pattern_parts): + match = globber.compile(pattern_part) + if match(path_part) is None: + return False + return True + # Subclassing os.PathLike makes isinstance() checks slower, # which in turn makes Path construction slower. Register instead! os.PathLike.register(PurePath) @@ -683,7 +719,7 @@ class PureWindowsPath(PurePath): __slots__ = () -class Path(PathBase, PurePath): +class Path(WritablePath, ReadablePath, PurePath): """PurePath subclass that can make system calls. Path represents a filesystem path but unlike PurePath, also offers @@ -823,6 +859,13 @@ def open(self, mode='r', buffering=-1, encoding=None, encoding = io.text_encoding(encoding) return io.open(self, mode, buffering, encoding, errors, newline) + def read_bytes(self): + """ + Open the file in bytes mode, read it, and close the file. + """ + with self.open(mode='rb', buffering=0) as f: + return f.read() + def read_text(self, encoding=None, errors=None, newline=None): """ Open the file in text mode, read it, and close the file. @@ -830,7 +873,17 @@ def read_text(self, encoding=None, errors=None, newline=None): # Call io.text_encoding() here to ensure any warning is raised at an # appropriate stack level. encoding = io.text_encoding(encoding) - return PathBase.read_text(self, encoding, errors, newline) + with self.open(mode='r', encoding=encoding, errors=errors, newline=newline) as f: + return f.read() + + def write_bytes(self, data): + """ + Open the file in bytes mode, write to it, and close the file. + """ + # type-check for the buffer interface before truncating the file + view = memoryview(data) + with self.open(mode='wb') as f: + return f.write(view) def write_text(self, data, encoding=None, errors=None, newline=None): """ @@ -839,7 +892,11 @@ def write_text(self, data, encoding=None, errors=None, newline=None): # Call io.text_encoding() here to ensure any warning is raised at an # appropriate stack level. 
encoding = io.text_encoding(encoding) - return PathBase.write_text(self, data, encoding, errors, newline) + if not isinstance(data, str): + raise TypeError('data must be str, not %s' % + data.__class__.__name__) + with self.open(mode='w', encoding=encoding, errors=errors, newline=newline) as f: + return f.write(data) _remove_leading_dot = operator.itemgetter(slice(2, None)) _remove_trailing_slash = operator.itemgetter(slice(-1)) @@ -1122,7 +1179,8 @@ def replace(self, target): os.replace(self, target) return self.with_segments(target) - copy = property(_LocalCopyWorker, doc=_LocalCopyWorker.__call__.__doc__) + _copy_reader = property(_LocalCopyReader) + _copy_writer = property(_LocalCopyWriter) def move(self, target): """ @@ -1134,9 +1192,9 @@ def move(self, target): except TypeError: pass else: - if not isinstance(target, PathBase): + if not hasattr(target, '_copy_writer'): target = self.with_segments(target_str) - target.copy._ensure_different_file(self) + target._copy_writer._ensure_different_file(self) try: os.replace(self, target_str) return target @@ -1155,7 +1213,7 @@ def move_into(self, target_dir): name = self.name if not name: raise ValueError(f"{self!r} has an empty name") - elif isinstance(target_dir, PathBase): + elif hasattr(target_dir, '_copy_writer'): target = target_dir / name else: target = self.with_segments(target_dir, name) diff --git a/Lib/pathlib/_types.py b/Lib/pathlib/_types.py index 72dac2e276fce0..84032bb5b4ff1a 100644 --- a/Lib/pathlib/_types.py +++ b/Lib/pathlib/_types.py @@ -9,7 +9,7 @@ class Parser(Protocol): """Protocol for path parsers, which do low-level path manipulation. Path parsers provide a subset of the os.path API, specifically those - functions needed to provide PurePathBase functionality. Each PurePathBase + functions needed to provide JoinablePath functionality. Each JoinablePath subclass references its path parser via a 'parser' class attribute. """ diff --git a/Lib/pdb.py b/Lib/pdb.py index 10d1923cdad2d6..beef74d792250b 100644 --- a/Lib/pdb.py +++ b/Lib/pdb.py @@ -1725,6 +1725,19 @@ def do_quit(self, arg): Quit from the debugger. The program being executed is aborted. """ + if self.mode == 'inline': + while True: + try: + reply = input('Quitting pdb will kill the process. Quit anyway? [y/n] ') + reply = reply.lower().strip() + except EOFError: + reply = 'y' + self.message('') + if reply == 'y' or reply == '': + sys.exit(0) + elif reply.lower() == 'n': + return + self._user_requested_quit = True self.set_quit() return 1 @@ -1738,9 +1751,7 @@ def do_EOF(self, arg): Handles the receipt of EOF as a command. 
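A standalone sketch (not pdb's API; confirm_quit is a made-up name) of the confirmation loop that do_quit() now runs in inline mode, driven by stubbing input() so it can run non-interactively:

    from unittest import mock

    def confirm_quit():
        while True:
            try:
                reply = input('Quitting pdb will kill the process. Quit anyway? [y/n] ')
                reply = reply.lower().strip()
            except EOFError:
                reply = 'y'
            if reply == 'y' or reply == '':
                return True       # the real command calls sys.exit(0) at this point
            elif reply == 'n':
                return False

    with mock.patch('builtins.input', side_effect=['maybe', 'n']):
        print(confirm_quit())     # False - unrecognised input re-prompts, then 'n' keeps the session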
""" self.message('') - self._user_requested_quit = True - self.set_quit() - return 1 + return self.do_quit(arg) def do_args(self, arg): """a(rgs) diff --git a/Lib/pickle.py b/Lib/pickle.py index 1920973e3f83e9..8afb4aa4285f37 100644 --- a/Lib/pickle.py +++ b/Lib/pickle.py @@ -31,7 +31,6 @@ import sys from sys import maxsize from struct import pack, unpack -import re import io import codecs import _compat_pickle @@ -188,7 +187,7 @@ def __init__(self, value): NEXT_BUFFER = b'\x97' # push next out-of-band buffer READONLY_BUFFER = b'\x98' # make top of stack readonly -__all__.extend([x for x in dir() if re.match("[A-Z][A-Z0-9_]+$", x)]) +__all__.extend(x for x in dir() if x.isupper() and not x.startswith('_')) class _Framer: diff --git a/Lib/pstats.py b/Lib/pstats.py index 46e18fb7592a77..becaf35580eaee 100644 --- a/Lib/pstats.py +++ b/Lib/pstats.py @@ -29,7 +29,6 @@ from enum import StrEnum, _simple_enum from functools import cmp_to_key from dataclasses import dataclass -from typing import Dict __all__ = ["Stats", "SortKey", "FunctionProfile", "StatsProfile"] @@ -69,7 +68,7 @@ class FunctionProfile: class StatsProfile: '''Class for keeping track of an item in inventory.''' total_tt: float - func_profiles: Dict[str, FunctionProfile] + func_profiles: dict[str, FunctionProfile] class Stats: """This class is used for creating reports from data generated by the diff --git a/Lib/pydoc.py b/Lib/pydoc.py index c863794ea14ef9..922946e5fa7ddb 100644 --- a/Lib/pydoc.py +++ b/Lib/pydoc.py @@ -53,6 +53,7 @@ class or function within a module or module in a package. If the # the current directory is changed with os.chdir(), an incorrect # path will be displayed. +import ast import __future__ import builtins import importlib._bootstrap @@ -384,21 +385,29 @@ def ispackage(path): return False def source_synopsis(file): - line = file.readline() - while line[:1] == '#' or not line.strip(): - line = file.readline() - if not line: break - line = line.strip() - if line[:4] == 'r"""': line = line[1:] - if line[:3] == '"""': - line = line[3:] - if line[-1:] == '\\': line = line[:-1] - while not line.strip(): - line = file.readline() - if not line: break - result = line.split('"""')[0].strip() - else: result = None - return result + """Return the one-line summary of a file object, if present""" + + string = '' + try: + tokens = tokenize.generate_tokens(file.readline) + for tok_type, tok_string, _, _, _ in tokens: + if tok_type == tokenize.STRING: + string += tok_string + elif tok_type == tokenize.NEWLINE: + with warnings.catch_warnings(): + # Ignore the "invalid escape sequence" warning. 
+ warnings.simplefilter("ignore", SyntaxWarning) + docstring = ast.literal_eval(string) + if not isinstance(docstring, str): + return None + return docstring.strip().split('\n')[0].strip() + elif tok_type == tokenize.OP and tok_string in ('(', ')'): + string += tok_string + elif tok_type not in (tokenize.COMMENT, tokenize.NL, tokenize.ENCODING): + return None + except (tokenize.TokenError, UnicodeDecodeError, SyntaxError): + return None + return None def synopsis(filename, cache={}): """Get the one-line summary out of a module file.""" @@ -1426,7 +1435,8 @@ def makename(c, m=object.__module__): # List the built-in subclasses, if any: subclasses = sorted( (str(cls.__name__) for cls in type.__subclasses__(object) - if not cls.__name__.startswith("_") and cls.__module__ == "builtins"), + if (not cls.__name__.startswith("_") and + getattr(cls, '__module__', '') == "builtins")), key=str.lower ) no_of_subclasses = len(subclasses) diff --git a/Lib/pydoc_data/topics.py b/Lib/pydoc_data/topics.py index aebcef2b81d43d..3e8c7ce321edc2 100644 --- a/Lib/pydoc_data/topics.py +++ b/Lib/pydoc_data/topics.py @@ -1,17478 +1,12781 @@ -# -*- coding: utf-8 -*- -# Autogenerated by Sphinx on Tue Dec 17 11:49:52 2024 +# Autogenerated by Sphinx on Tue Jan 21 03:33:33 2025 # as part of the release process. -topics = {'assert': 'The "assert" statement\n' - '**********************\n' - '\n' - 'Assert statements are a convenient way to insert debugging ' - 'assertions\n' - 'into a program:\n' - '\n' - ' assert_stmt ::= "assert" expression ["," expression]\n' - '\n' - 'The simple form, "assert expression", is equivalent to\n' - '\n' - ' if __debug__:\n' - ' if not expression: raise AssertionError\n' - '\n' - 'The extended form, "assert expression1, expression2", is ' - 'equivalent to\n' - '\n' - ' if __debug__:\n' - ' if not expression1: raise AssertionError(expression2)\n' - '\n' - 'These equivalences assume that "__debug__" and "AssertionError" ' - 'refer\n' - 'to the built-in variables with those names. In the current\n' - 'implementation, the built-in variable "__debug__" is "True" under\n' - 'normal circumstances, "False" when optimization is requested ' - '(command\n' - 'line option "-O"). The current code generator emits no code for ' - 'an\n' - '"assert" statement when optimization is requested at compile ' - 'time.\n' - 'Note that it is unnecessary to include the source code for the\n' - 'expression that failed in the error message; it will be displayed ' - 'as\n' - 'part of the stack trace.\n' - '\n' - 'Assignments to "__debug__" are illegal. 
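An illustration (again not part of the patch) of what the rewritten source_synopsis() is expected to return, exercising it through the public pydoc module with an in-memory file:

    import io
    import pydoc

    src = '"""One-line summary.\n\nLonger description follows.\n"""\n\nx = 1\n'
    print(pydoc.source_synopsis(io.StringIO(src)))       # One-line summary.
    print(pydoc.source_synopsis(io.StringIO('x = 1\n')))  # None - no leading docstring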
The value for the ' - 'built-in\n' - 'variable is determined when the interpreter starts.\n', - 'assignment': 'Assignment statements\n' - '*********************\n' - '\n' - 'Assignment statements are used to (re)bind names to values and ' - 'to\n' - 'modify attributes or items of mutable objects:\n' - '\n' - ' assignment_stmt ::= (target_list "=")+ (starred_expression ' - '| yield_expression)\n' - ' target_list ::= target ("," target)* [","]\n' - ' target ::= identifier\n' - ' | "(" [target_list] ")"\n' - ' | "[" [target_list] "]"\n' - ' | attributeref\n' - ' | subscription\n' - ' | slicing\n' - ' | "*" target\n' - '\n' - '(See section Primaries for the syntax definitions for ' - '*attributeref*,\n' - '*subscription*, and *slicing*.)\n' - '\n' - 'An assignment statement evaluates the expression list ' - '(remember that\n' - 'this can be a single expression or a comma-separated list, the ' - 'latter\n' - 'yielding a tuple) and assigns the single resulting object to ' - 'each of\n' - 'the target lists, from left to right.\n' - '\n' - 'Assignment is defined recursively depending on the form of the ' - 'target\n' - '(list). When a target is part of a mutable object (an ' - 'attribute\n' - 'reference, subscription or slicing), the mutable object must\n' - 'ultimately perform the assignment and decide about its ' - 'validity, and\n' - 'may raise an exception if the assignment is unacceptable. The ' - 'rules\n' - 'observed by various types and the exceptions raised are given ' - 'with the\n' - 'definition of the object types (see section The standard type\n' - 'hierarchy).\n' - '\n' - 'Assignment of an object to a target list, optionally enclosed ' - 'in\n' - 'parentheses or square brackets, is recursively defined as ' - 'follows.\n' - '\n' - '* If the target list is a single target with no trailing ' - 'comma,\n' - ' optionally in parentheses, the object is assigned to that ' - 'target.\n' - '\n' - '* Else:\n' - '\n' - ' * If the target list contains one target prefixed with an ' - 'asterisk,\n' - ' called a “starred†target: The object must be an iterable ' - 'with at\n' - ' least as many items as there are targets in the target ' - 'list, minus\n' - ' one. The first items of the iterable are assigned, from ' - 'left to\n' - ' right, to the targets before the starred target. The ' - 'final items\n' - ' of the iterable are assigned to the targets after the ' - 'starred\n' - ' target. A list of the remaining items in the iterable is ' - 'then\n' - ' assigned to the starred target (the list can be empty).\n' - '\n' - ' * Else: The object must be an iterable with the same number ' - 'of items\n' - ' as there are targets in the target list, and the items ' - 'are\n' - ' assigned, from left to right, to the corresponding ' - 'targets.\n' - '\n' - 'Assignment of an object to a single target is recursively ' - 'defined as\n' - 'follows.\n' - '\n' - '* If the target is an identifier (name):\n' - '\n' - ' * If the name does not occur in a "global" or "nonlocal" ' - 'statement\n' - ' in the current code block: the name is bound to the object ' - 'in the\n' - ' current local namespace.\n' - '\n' - ' * Otherwise: the name is bound to the object in the global ' - 'namespace\n' - ' or the outer namespace determined by "nonlocal", ' - 'respectively.\n' - '\n' - ' The name is rebound if it was already bound. 
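A small worked example of the starred-target rule described above (illustrative only):

    first, *middle, last = [1, 2, 3, 4, 5]
    print(first, middle, last)   # 1 [2, 3, 4] 5 - the starred target absorbs the leftovers
    a, *rest = 'xy'
    print(a, rest)               # x ['y']
    *empty, = ()
    print(empty)                 # [] - the list assigned to a starred target may be empty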
This may cause ' - 'the\n' - ' reference count for the object previously bound to the name ' - 'to reach\n' - ' zero, causing the object to be deallocated and its ' - 'destructor (if it\n' - ' has one) to be called.\n' - '\n' - '* If the target is an attribute reference: The primary ' - 'expression in\n' - ' the reference is evaluated. It should yield an object with\n' - ' assignable attributes; if this is not the case, "TypeError" ' - 'is\n' - ' raised. That object is then asked to assign the assigned ' - 'object to\n' - ' the given attribute; if it cannot perform the assignment, it ' - 'raises\n' - ' an exception (usually but not necessarily ' - '"AttributeError").\n' - '\n' - ' Note: If the object is a class instance and the attribute ' - 'reference\n' - ' occurs on both sides of the assignment operator, the ' - 'right-hand side\n' - ' expression, "a.x" can access either an instance attribute or ' - '(if no\n' - ' instance attribute exists) a class attribute. The left-hand ' - 'side\n' - ' target "a.x" is always set as an instance attribute, ' - 'creating it if\n' - ' necessary. Thus, the two occurrences of "a.x" do not ' - 'necessarily\n' - ' refer to the same attribute: if the right-hand side ' - 'expression\n' - ' refers to a class attribute, the left-hand side creates a ' - 'new\n' - ' instance attribute as the target of the assignment:\n' - '\n' - ' class Cls:\n' - ' x = 3 # class variable\n' - ' inst = Cls()\n' - ' inst.x = inst.x + 1 # writes inst.x as 4 leaving Cls.x ' - 'as 3\n' - '\n' - ' This description does not necessarily apply to descriptor\n' - ' attributes, such as properties created with "property()".\n' - '\n' - '* If the target is a subscription: The primary expression in ' - 'the\n' - ' reference is evaluated. It should yield either a mutable ' - 'sequence\n' - ' object (such as a list) or a mapping object (such as a ' - 'dictionary).\n' - ' Next, the subscript expression is evaluated.\n' - '\n' - ' If the primary is a mutable sequence object (such as a ' - 'list), the\n' - ' subscript must yield an integer. If it is negative, the ' - 'sequence’s\n' - ' length is added to it. The resulting value must be a ' - 'nonnegative\n' - ' integer less than the sequence’s length, and the sequence is ' - 'asked\n' - ' to assign the assigned object to its item with that index. ' - 'If the\n' - ' index is out of range, "IndexError" is raised (assignment to ' - 'a\n' - ' subscripted sequence cannot add new items to a list).\n' - '\n' - ' If the primary is a mapping object (such as a dictionary), ' - 'the\n' - ' subscript must have a type compatible with the mapping’s key ' - 'type,\n' - ' and the mapping is then asked to create a key/value pair ' - 'which maps\n' - ' the subscript to the assigned object. This can either ' - 'replace an\n' - ' existing key/value pair with the same key value, or insert a ' - 'new\n' - ' key/value pair (if no key with the same value existed).\n' - '\n' - ' For user-defined objects, the "__setitem__()" method is ' - 'called with\n' - ' appropriate arguments.\n' - '\n' - '* If the target is a slicing: The primary expression in the ' - 'reference\n' - ' is evaluated. It should yield a mutable sequence object ' - '(such as a\n' - ' list). The assigned object should be a sequence object of ' - 'the same\n' - ' type. Next, the lower and upper bound expressions are ' - 'evaluated,\n' - ' insofar they are present; defaults are zero and the ' - 'sequence’s\n' - ' length. The bounds should evaluate to integers. 
If either ' - 'bound is\n' - ' negative, the sequence’s length is added to it. The ' - 'resulting\n' - ' bounds are clipped to lie between zero and the sequence’s ' - 'length,\n' - ' inclusive. Finally, the sequence object is asked to replace ' - 'the\n' - ' slice with the items of the assigned sequence. The length ' - 'of the\n' - ' slice may be different from the length of the assigned ' - 'sequence,\n' - ' thus changing the length of the target sequence, if the ' - 'target\n' - ' sequence allows it.\n' - '\n' - '**CPython implementation detail:** In the current ' - 'implementation, the\n' - 'syntax for targets is taken to be the same as for expressions, ' - 'and\n' - 'invalid syntax is rejected during the code generation phase, ' - 'causing\n' - 'less detailed error messages.\n' - '\n' - 'Although the definition of assignment implies that overlaps ' - 'between\n' - 'the left-hand side and the right-hand side are ‘simultaneous’ ' - '(for\n' - 'example "a, b = b, a" swaps two variables), overlaps *within* ' - 'the\n' - 'collection of assigned-to variables occur left-to-right, ' - 'sometimes\n' - 'resulting in confusion. For instance, the following program ' - 'prints\n' - '"[0, 2]":\n' - '\n' - ' x = [0, 1]\n' - ' i = 0\n' - ' i, x[i] = 1, 2 # i is updated, then x[i] is ' - 'updated\n' - ' print(x)\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 3132** - Extended Iterable Unpacking\n' - ' The specification for the "*target" feature.\n' - '\n' - '\n' - 'Augmented assignment statements\n' - '===============================\n' - '\n' - 'Augmented assignment is the combination, in a single ' - 'statement, of a\n' - 'binary operation and an assignment statement:\n' - '\n' - ' augmented_assignment_stmt ::= augtarget augop ' - '(expression_list | yield_expression)\n' - ' augtarget ::= identifier | attributeref | ' - 'subscription | slicing\n' - ' augop ::= "+=" | "-=" | "*=" | "@=" | ' - '"/=" | "//=" | "%=" | "**="\n' - ' | ">>=" | "<<=" | "&=" | "^=" | "|="\n' - '\n' - '(See section Primaries for the syntax definitions of the last ' - 'three\n' - 'symbols.)\n' - '\n' - 'An augmented assignment evaluates the target (which, unlike ' - 'normal\n' - 'assignment statements, cannot be an unpacking) and the ' - 'expression\n' - 'list, performs the binary operation specific to the type of ' - 'assignment\n' - 'on the two operands, and assigns the result to the original ' - 'target.\n' - 'The target is only evaluated once.\n' - '\n' - 'An augmented assignment statement like "x += 1" can be ' - 'rewritten as "x\n' - '= x + 1" to achieve a similar, but not exactly equal effect. ' - 'In the\n' - 'augmented version, "x" is only evaluated once. Also, when ' - 'possible,\n' - 'the actual operation is performed *in-place*, meaning that ' - 'rather than\n' - 'creating a new object and assigning that to the target, the ' - 'old object\n' - 'is modified instead.\n' - '\n' - 'Unlike normal assignments, augmented assignments evaluate the ' - 'left-\n' - 'hand side *before* evaluating the right-hand side. For ' - 'example, "a[i]\n' - '+= f(x)" first looks-up "a[i]", then it evaluates "f(x)" and ' - 'performs\n' - 'the addition, and lastly, it writes the result back to ' - '"a[i]".\n' - '\n' - 'With the exception of assigning to tuples and multiple targets ' - 'in a\n' - 'single statement, the assignment done by augmented assignment\n' - 'statements is handled the same way as normal assignments. 
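For illustration, the in-place versus rebinding distinction described above:

    a = [1, 2]
    alias = a
    alias += [3]      # list.__iadd__ mutates in place, so alias and a remain the same object
    print(a)          # [1, 2, 3]

    t = (1, 2)
    u = t
    u += (3,)         # tuples are immutable: a new object is bound to u only
    print(t, u)       # (1, 2) (1, 2, 3)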
' - 'Similarly,\n' - 'with the exception of the possible *in-place* behavior, the ' - 'binary\n' - 'operation performed by augmented assignment is the same as the ' - 'normal\n' - 'binary operations.\n' - '\n' - 'For targets which are attribute references, the same caveat ' - 'about\n' - 'class and instance attributes applies as for regular ' - 'assignments.\n' - '\n' - '\n' - 'Annotated assignment statements\n' - '===============================\n' - '\n' - '*Annotation* assignment is the combination, in a single ' - 'statement, of\n' - 'a variable or attribute annotation and an optional assignment\n' - 'statement:\n' - '\n' - ' annotated_assignment_stmt ::= augtarget ":" expression\n' - ' ["=" (starred_expression | ' - 'yield_expression)]\n' - '\n' - 'The difference from normal Assignment statements is that only ' - 'a single\n' - 'target is allowed.\n' - '\n' - 'The assignment target is considered “simple†if it consists of ' - 'a\n' - 'single name that is not enclosed in parentheses. For simple ' - 'assignment\n' - 'targets, if in class or module scope, the annotations are ' - 'gathered in\n' - 'a lazily evaluated annotation scope. The annotations can be ' - 'evaluated\n' - 'using the "__annotations__" attribute of a class or module, or ' - 'using\n' - 'the facilities in the "annotationlib" module.\n' - '\n' - 'If the assignment target is not simple (an attribute, ' - 'subscript node,\n' - 'or parenthesized name), the annotation is never evaluated.\n' - '\n' - 'If a name is annotated in a function scope, then this name is ' - 'local\n' - 'for that scope. Annotations are never evaluated and stored in ' - 'function\n' - 'scopes.\n' - '\n' - 'If the right hand side is present, an annotated assignment ' - 'performs\n' - 'the actual assignment as if there was no annotation present. ' - 'If the\n' - 'right hand side is not present for an expression target, then ' - 'the\n' - 'interpreter evaluates the target except for the last ' - '"__setitem__()"\n' - 'or "__setattr__()" call.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 526** - Syntax for Variable Annotations\n' - ' The proposal that added syntax for annotating the types ' - 'of\n' - ' variables (including class variables and instance ' - 'variables),\n' - ' instead of expressing them through comments.\n' - '\n' - ' **PEP 484** - Type hints\n' - ' The proposal that added the "typing" module to provide a ' - 'standard\n' - ' syntax for type annotations that can be used in static ' - 'analysis\n' - ' tools and IDEs.\n' - '\n' - 'Changed in version 3.8: Now annotated assignments allow the ' - 'same\n' - 'expressions in the right hand side as regular assignments. ' - 'Previously,\n' - 'some expressions (like un-parenthesized tuple expressions) ' - 'caused a\n' - 'syntax error.\n' - '\n' - 'Changed in version 3.14: Annotations are now lazily evaluated ' - 'in a\n' - 'separate annotation scope. 
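One consequence of the annotation rules above, shown as a tiny example (illustrative only): annotations in a function body are never evaluated or stored, so an undefined name in annotation position is harmless.

    def f():
        x: ThisNameIsNeverLookedUp = 3   # the annotation expression is not evaluated
        return x

    print(f())                 # 3
    print(f.__annotations__)   # {} - local variable annotations are not stored either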
If the assignment target is not ' - 'simple,\n' - 'annotations are never evaluated.\n', - 'assignment-expressions': 'Assignment expressions\n' - '**********************\n' - '\n' - ' assignment_expression ::= [identifier ":="] ' - 'expression\n' - '\n' - 'An assignment expression (sometimes also called a ' - '“named expressionâ€\n' - 'or “walrusâ€) assigns an "expression" to an ' - '"identifier", while also\n' - 'returning the value of the "expression".\n' - '\n' - 'One common use case is when handling matched ' - 'regular expressions:\n' - '\n' - ' if matching := pattern.search(data):\n' - ' do_something(matching)\n' - '\n' - 'Or, when processing a file stream in chunks:\n' - '\n' - ' while chunk := file.read(9000):\n' - ' process(chunk)\n' - '\n' - 'Assignment expressions must be surrounded by ' - 'parentheses when used as\n' - 'expression statements and when used as ' - 'sub-expressions in slicing,\n' - 'conditional, lambda, keyword-argument, and ' - 'comprehension-if\n' - 'expressions and in "assert", "with", and ' - '"assignment" statements. In\n' - 'all other places where they can be used, ' - 'parentheses are not required,\n' - 'including in "if" and "while" statements.\n' - '\n' - 'Added in version 3.8: See **PEP 572** for more ' - 'details about\n' - 'assignment expressions.\n', - 'async': 'Coroutines\n' - '**********\n' - '\n' - 'Added in version 3.5.\n' - '\n' - '\n' - 'Coroutine function definition\n' - '=============================\n' - '\n' - ' async_funcdef ::= [decorators] "async" "def" funcname "(" ' - '[parameter_list] ")"\n' - ' ["->" expression] ":" suite\n' - '\n' - 'Execution of Python coroutines can be suspended and resumed at ' - 'many\n' - 'points (see *coroutine*). "await" expressions, "async for" and ' - '"async\n' - 'with" can only be used in the body of a coroutine function.\n' - '\n' - 'Functions defined with "async def" syntax are always coroutine\n' - 'functions, even if they do not contain "await" or "async" ' - 'keywords.\n' - '\n' - 'It is a "SyntaxError" to use a "yield from" expression inside the ' - 'body\n' - 'of a coroutine function.\n' - '\n' - 'An example of a coroutine function:\n' - '\n' - ' async def func(param1, param2):\n' - ' do_stuff()\n' - ' await some_coroutine()\n' - '\n' - 'Changed in version 3.7: "await" and "async" are now keywords;\n' - 'previously they were only treated as such inside the body of a\n' - 'coroutine function.\n' - '\n' - '\n' - 'The "async for" statement\n' - '=========================\n' - '\n' - ' async_for_stmt ::= "async" for_stmt\n' - '\n' - 'An *asynchronous iterable* provides an "__aiter__" method that\n' - 'directly returns an *asynchronous iterator*, which can call\n' - 'asynchronous code in its "__anext__" method.\n' - '\n' - 'The "async for" statement allows convenient iteration over\n' - 'asynchronous iterables.\n' - '\n' - 'The following code:\n' - '\n' - ' async for TARGET in ITER:\n' - ' SUITE\n' - ' else:\n' - ' SUITE2\n' - '\n' - 'Is semantically equivalent to:\n' - '\n' - ' iter = (ITER)\n' - ' iter = type(iter).__aiter__(iter)\n' - ' running = True\n' - '\n' - ' while running:\n' - ' try:\n' - ' TARGET = await type(iter).__anext__(iter)\n' - ' except StopAsyncIteration:\n' - ' running = False\n' - ' else:\n' - ' SUITE\n' - ' else:\n' - ' SUITE2\n' - '\n' - 'See also "__aiter__()" and "__anext__()" for details.\n' - '\n' - 'It is a "SyntaxError" to use an "async for" statement outside the ' - 'body\n' - 'of a coroutine function.\n' - '\n' - '\n' - 'The "async with" statement\n' - 
'==========================\n' - '\n' - ' async_with_stmt ::= "async" with_stmt\n' - '\n' - 'An *asynchronous context manager* is a *context manager* that is ' - 'able\n' - 'to suspend execution in its *enter* and *exit* methods.\n' - '\n' - 'The following code:\n' - '\n' - ' async with EXPRESSION as TARGET:\n' - ' SUITE\n' - '\n' - 'is semantically equivalent to:\n' - '\n' - ' manager = (EXPRESSION)\n' - ' aenter = type(manager).__aenter__\n' - ' aexit = type(manager).__aexit__\n' - ' value = await aenter(manager)\n' - ' hit_except = False\n' - '\n' - ' try:\n' - ' TARGET = value\n' - ' SUITE\n' - ' except:\n' - ' hit_except = True\n' - ' if not await aexit(manager, *sys.exc_info()):\n' - ' raise\n' - ' finally:\n' - ' if not hit_except:\n' - ' await aexit(manager, None, None, None)\n' - '\n' - 'See also "__aenter__()" and "__aexit__()" for details.\n' - '\n' - 'It is a "SyntaxError" to use an "async with" statement outside the\n' - 'body of a coroutine function.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 492** - Coroutines with async and await syntax\n' - ' The proposal that made coroutines a proper standalone concept ' - 'in\n' - ' Python, and added supporting syntax.\n', - 'atom-identifiers': 'Identifiers (Names)\n' - '*******************\n' - '\n' - 'An identifier occurring as an atom is a name. See ' - 'section Identifiers\n' - 'and keywords for lexical definition and section Naming ' - 'and binding for\n' - 'documentation of naming and binding.\n' - '\n' - 'When the name is bound to an object, evaluation of the ' - 'atom yields\n' - 'that object. When a name is not bound, an attempt to ' - 'evaluate it\n' - 'raises a "NameError" exception.\n' - '\n' - '\n' - 'Private name mangling\n' - '=====================\n' - '\n' - 'When an identifier that textually occurs in a class ' - 'definition begins\n' - 'with two or more underscore characters and does not end ' - 'in two or more\n' - 'underscores, it is considered a *private name* of that ' - 'class.\n' - '\n' - 'See also: The class specifications.\n' - '\n' - 'More precisely, private names are transformed to a ' - 'longer form before\n' - 'code is generated for them. If the transformed name is ' - 'longer than\n' - '255 characters, implementation-defined truncation may ' - 'happen.\n' - '\n' - 'The transformation is independent of the syntactical ' - 'context in which\n' - 'the identifier is used but only the following private ' - 'identifiers are\n' - 'mangled:\n' - '\n' - '* Any name used as the name of a variable that is ' - 'assigned or read or\n' - ' any name of an attribute being accessed.\n' - '\n' - ' The "__name__" attribute of nested functions, classes, ' - 'and type\n' - ' aliases is however not mangled.\n' - '\n' - '* The name of imported modules, e.g., "__spam" in ' - '"import __spam". 
If\n' - ' the module is part of a package (i.e., its name ' - 'contains a dot), the\n' - ' name is *not* mangled, e.g., the "__foo" in "import ' - '__foo.bar" is\n' - ' not mangled.\n' - '\n' - '* The name of an imported member, e.g., "__f" in "from ' - 'spam import\n' - ' __f".\n' - '\n' - 'The transformation rule is defined as follows:\n' - '\n' - '* The class name, with leading underscores removed and a ' - 'single\n' - ' leading underscore inserted, is inserted in front of ' - 'the identifier,\n' - ' e.g., the identifier "__spam" occurring in a class ' - 'named "Foo",\n' - ' "_Foo" or "__Foo" is transformed to "_Foo__spam".\n' - '\n' - '* If the class name consists only of underscores, the ' - 'transformation\n' - ' is the identity, e.g., the identifier "__spam" ' - 'occurring in a class\n' - ' named "_" or "__" is left as is.\n', - 'atom-literals': 'Literals\n' - '********\n' - '\n' - 'Python supports string and bytes literals and various ' - 'numeric\n' - 'literals:\n' - '\n' - ' literal ::= stringliteral | bytesliteral\n' - ' | integer | floatnumber | imagnumber\n' - '\n' - 'Evaluation of a literal yields an object of the given type ' - '(string,\n' - 'bytes, integer, floating-point number, complex number) with ' - 'the given\n' - 'value. The value may be approximated in the case of ' - 'floating-point\n' - 'and imaginary (complex) literals. See section Literals for ' - 'details.\n' - '\n' - 'All literals correspond to immutable data types, and hence ' - 'the\n' - 'object’s identity is less important than its value. ' - 'Multiple\n' - 'evaluations of literals with the same value (either the ' - 'same\n' - 'occurrence in the program text or a different occurrence) ' - 'may obtain\n' - 'the same object or a different object with the same ' - 'value.\n', - 'attribute-access': 'Customizing attribute access\n' - '****************************\n' - '\n' - 'The following methods can be defined to customize the ' - 'meaning of\n' - 'attribute access (use of, assignment to, or deletion of ' - '"x.name") for\n' - 'class instances.\n' - '\n' - 'object.__getattr__(self, name)\n' - '\n' - ' Called when the default attribute access fails with ' - 'an\n' - ' "AttributeError" (either "__getattribute__()" raises ' - 'an\n' - ' "AttributeError" because *name* is not an instance ' - 'attribute or an\n' - ' attribute in the class tree for "self"; or ' - '"__get__()" of a *name*\n' - ' property raises "AttributeError"). This method ' - 'should either\n' - ' return the (computed) attribute value or raise an ' - '"AttributeError"\n' - ' exception. The "object" class itself does not provide ' - 'this method.\n' - '\n' - ' Note that if the attribute is found through the ' - 'normal mechanism,\n' - ' "__getattr__()" is not called. (This is an ' - 'intentional asymmetry\n' - ' between "__getattr__()" and "__setattr__()".) This is ' - 'done both for\n' - ' efficiency reasons and because otherwise ' - '"__getattr__()" would have\n' - ' no way to access other attributes of the instance. ' - 'Note that at\n' - ' least for instance variables, you can take total ' - 'control by not\n' - ' inserting any values in the instance attribute ' - 'dictionary (but\n' - ' instead inserting them in another object). See the\n' - ' "__getattribute__()" method below for a way to ' - 'actually get total\n' - ' control over attribute access.\n' - '\n' - 'object.__getattribute__(self, name)\n' - '\n' - ' Called unconditionally to implement attribute ' - 'accesses for\n' - ' instances of the class. 
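A compact example of the __getattr__() fallback behaviour described above (the Config class is made up for illustration): it only runs when normal lookup fails.

    class Config:
        def __init__(self):
            self.values = {'debug': True}

        def __getattr__(self, name):
            # Reached only after normal attribute lookup has failed.
            try:
                return self.values[name]
            except KeyError:
                raise AttributeError(name) from None

    c = Config()
    print(c.values)   # found in the instance dict; __getattr__ is not called
    print(c.debug)    # True - served by __getattr__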
If the class also defines ' - '"__getattr__()",\n' - ' the latter will not be called unless ' - '"__getattribute__()" either\n' - ' calls it explicitly or raises an "AttributeError". ' - 'This method\n' - ' should return the (computed) attribute value or raise ' - 'an\n' - ' "AttributeError" exception. In order to avoid ' - 'infinite recursion in\n' - ' this method, its implementation should always call ' - 'the base class\n' - ' method with the same name to access any attributes it ' - 'needs, for\n' - ' example, "object.__getattribute__(self, name)".\n' - '\n' - ' Note:\n' - '\n' - ' This method may still be bypassed when looking up ' - 'special methods\n' - ' as the result of implicit invocation via language ' - 'syntax or\n' - ' built-in functions. See Special method lookup.\n' - '\n' - ' For certain sensitive attribute accesses, raises an ' - 'auditing event\n' - ' "object.__getattr__" with arguments "obj" and ' - '"name".\n' - '\n' - 'object.__setattr__(self, name, value)\n' - '\n' - ' Called when an attribute assignment is attempted. ' - 'This is called\n' - ' instead of the normal mechanism (i.e. store the value ' - 'in the\n' - ' instance dictionary). *name* is the attribute name, ' - '*value* is the\n' - ' value to be assigned to it.\n' - '\n' - ' If "__setattr__()" wants to assign to an instance ' - 'attribute, it\n' - ' should call the base class method with the same name, ' - 'for example,\n' - ' "object.__setattr__(self, name, value)".\n' - '\n' - ' For certain sensitive attribute assignments, raises ' - 'an auditing\n' - ' event "object.__setattr__" with arguments "obj", ' - '"name", "value".\n' - '\n' - 'object.__delattr__(self, name)\n' - '\n' - ' Like "__setattr__()" but for attribute deletion ' - 'instead of\n' - ' assignment. This should only be implemented if "del ' - 'obj.name" is\n' - ' meaningful for the object.\n' - '\n' - ' For certain sensitive attribute deletions, raises an ' - 'auditing event\n' - ' "object.__delattr__" with arguments "obj" and ' - '"name".\n' - '\n' - 'object.__dir__(self)\n' - '\n' - ' Called when "dir()" is called on the object. An ' - 'iterable must be\n' - ' returned. "dir()" converts the returned iterable to a ' - 'list and\n' - ' sorts it.\n' - '\n' - '\n' - 'Customizing module attribute access\n' - '===================================\n' - '\n' - 'Special names "__getattr__" and "__dir__" can be also ' - 'used to\n' - 'customize access to module attributes. The "__getattr__" ' - 'function at\n' - 'the module level should accept one argument which is the ' - 'name of an\n' - 'attribute and return the computed value or raise an ' - '"AttributeError".\n' - 'If an attribute is not found on a module object through ' - 'the normal\n' - 'lookup, i.e. "object.__getattribute__()", then ' - '"__getattr__" is\n' - 'searched in the module "__dict__" before raising an ' - '"AttributeError".\n' - 'If found, it is called with the attribute name and the ' - 'result is\n' - 'returned.\n' - '\n' - 'The "__dir__" function should accept no arguments, and ' - 'return an\n' - 'iterable of strings that represents the names accessible ' - 'on module. If\n' - 'present, this function overrides the standard "dir()" ' - 'search on a\n' - 'module.\n' - '\n' - 'For a more fine grained customization of the module ' - 'behavior (setting\n' - 'attributes, properties, etc.), one can set the ' - '"__class__" attribute\n' - 'of a module object to a subclass of "types.ModuleType". 
' - 'For example:\n' - '\n' - ' import sys\n' - ' from types import ModuleType\n' - '\n' - ' class VerboseModule(ModuleType):\n' - ' def __repr__(self):\n' - " return f'Verbose {self.__name__}'\n" - '\n' - ' def __setattr__(self, attr, value):\n' - " print(f'Setting {attr}...')\n" - ' super().__setattr__(attr, value)\n' - '\n' - ' sys.modules[__name__].__class__ = VerboseModule\n' - '\n' - 'Note:\n' - '\n' - ' Defining module "__getattr__" and setting module ' - '"__class__" only\n' - ' affect lookups made using the attribute access syntax ' - '– directly\n' - ' accessing the module globals (whether by code within ' - 'the module, or\n' - ' via a reference to the module’s globals dictionary) is ' - 'unaffected.\n' - '\n' - 'Changed in version 3.5: "__class__" module attribute is ' - 'now writable.\n' - '\n' - 'Added in version 3.7: "__getattr__" and "__dir__" module ' - 'attributes.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 562** - Module __getattr__ and __dir__\n' - ' Describes the "__getattr__" and "__dir__" functions ' - 'on modules.\n' - '\n' - '\n' - 'Implementing Descriptors\n' - '========================\n' - '\n' - 'The following methods only apply when an instance of the ' - 'class\n' - 'containing the method (a so-called *descriptor* class) ' - 'appears in an\n' - '*owner* class (the descriptor must be in either the ' - 'owner’s class\n' - 'dictionary or in the class dictionary for one of its ' - 'parents). In the\n' - 'examples below, “the attribute†refers to the attribute ' - 'whose name is\n' - 'the key of the property in the owner class’ "__dict__". ' - 'The "object"\n' - 'class itself does not implement any of these protocols.\n' - '\n' - 'object.__get__(self, instance, owner=None)\n' - '\n' - ' Called to get the attribute of the owner class (class ' - 'attribute\n' - ' access) or of an instance of that class (instance ' - 'attribute\n' - ' access). The optional *owner* argument is the owner ' - 'class, while\n' - ' *instance* is the instance that the attribute was ' - 'accessed through,\n' - ' or "None" when the attribute is accessed through the ' - '*owner*.\n' - '\n' - ' This method should return the computed attribute ' - 'value or raise an\n' - ' "AttributeError" exception.\n' - '\n' - ' **PEP 252** specifies that "__get__()" is callable ' - 'with one or two\n' - ' arguments. Python’s own built-in descriptors support ' - 'this\n' - ' specification; however, it is likely that some ' - 'third-party tools\n' - ' have descriptors that require both arguments. ' - 'Python’s own\n' - ' "__getattribute__()" implementation always passes in ' - 'both arguments\n' - ' whether they are required or not.\n' - '\n' - 'object.__set__(self, instance, value)\n' - '\n' - ' Called to set the attribute on an instance *instance* ' - 'of the owner\n' - ' class to a new value, *value*.\n' - '\n' - ' Note, adding "__set__()" or "__delete__()" changes ' - 'the kind of\n' - ' descriptor to a “data descriptorâ€. 
See Invoking ' - 'Descriptors for\n' - ' more details.\n' - '\n' - 'object.__delete__(self, instance)\n' - '\n' - ' Called to delete the attribute on an instance ' - '*instance* of the\n' - ' owner class.\n' - '\n' - 'Instances of descriptors may also have the ' - '"__objclass__" attribute\n' - 'present:\n' - '\n' - 'object.__objclass__\n' - '\n' - ' The attribute "__objclass__" is interpreted by the ' - '"inspect" module\n' - ' as specifying the class where this object was defined ' - '(setting this\n' - ' appropriately can assist in runtime introspection of ' - 'dynamic class\n' - ' attributes). For callables, it may indicate that an ' - 'instance of the\n' - ' given type (or a subclass) is expected or required as ' - 'the first\n' - ' positional argument (for example, CPython sets this ' - 'attribute for\n' - ' unbound methods that are implemented in C).\n' - '\n' - '\n' - 'Invoking Descriptors\n' - '====================\n' - '\n' - 'In general, a descriptor is an object attribute with ' - '“binding\n' - 'behaviorâ€, one whose attribute access has been ' - 'overridden by methods\n' - 'in the descriptor protocol: "__get__()", "__set__()", ' - 'and\n' - '"__delete__()". If any of those methods are defined for ' - 'an object, it\n' - 'is said to be a descriptor.\n' - '\n' - 'The default behavior for attribute access is to get, ' - 'set, or delete\n' - 'the attribute from an object’s dictionary. For instance, ' - '"a.x" has a\n' - 'lookup chain starting with "a.__dict__[\'x\']", then\n' - '"type(a).__dict__[\'x\']", and continuing through the ' - 'base classes of\n' - '"type(a)" excluding metaclasses.\n' - '\n' - 'However, if the looked-up value is an object defining ' - 'one of the\n' - 'descriptor methods, then Python may override the default ' - 'behavior and\n' - 'invoke the descriptor method instead. Where this occurs ' - 'in the\n' - 'precedence chain depends on which descriptor methods ' - 'were defined and\n' - 'how they were called.\n' - '\n' - 'The starting point for descriptor invocation is a ' - 'binding, "a.x". How\n' - 'the arguments are assembled depends on "a":\n' - '\n' - 'Direct Call\n' - ' The simplest and least common call is when user code ' - 'directly\n' - ' invokes a descriptor method: "x.__get__(a)".\n' - '\n' - 'Instance Binding\n' - ' If binding to an object instance, "a.x" is ' - 'transformed into the\n' - ' call: "type(a).__dict__[\'x\'].__get__(a, type(a))".\n' - '\n' - 'Class Binding\n' - ' If binding to a class, "A.x" is transformed into the ' - 'call:\n' - ' "A.__dict__[\'x\'].__get__(None, A)".\n' - '\n' - 'Super Binding\n' - ' A dotted lookup such as "super(A, a).x" searches\n' - ' "a.__class__.__mro__" for a base class "B" following ' - '"A" and then\n' - ' returns "B.__dict__[\'x\'].__get__(a, A)". If not a ' - 'descriptor, "x"\n' - ' is returned unchanged.\n' - '\n' - 'For instance bindings, the precedence of descriptor ' - 'invocation depends\n' - 'on which descriptor methods are defined. A descriptor ' - 'can define any\n' - 'combination of "__get__()", "__set__()" and ' - '"__delete__()". If it\n' - 'does not define "__get__()", then accessing the ' - 'attribute will return\n' - 'the descriptor object itself unless there is a value in ' - 'the object’s\n' - 'instance dictionary. If the descriptor defines ' - '"__set__()" and/or\n' - '"__delete__()", it is a data descriptor; if it defines ' - 'neither, it is\n' - 'a non-data descriptor. 
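The data versus non-data descriptor distinction, condensed into a runnable example (class names are made up for illustration):

    class Data:
        def __get__(self, obj, owner=None):
            return 'from data descriptor'
        def __set__(self, obj, value):
            raise AttributeError('read-only')

    class NonData:
        def __get__(self, obj, owner=None):
            return 'from non-data descriptor'

    class C:
        d = Data()
        n = NonData()

    c = C()
    c.__dict__.update(d='instance value', n='instance value')
    print(c.d)   # from data descriptor - data descriptors win over the instance dict
    print(c.n)   # instance value       - non-data descriptors are shadowed by it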
Normally, data descriptors ' - 'define both\n' - '"__get__()" and "__set__()", while non-data descriptors ' - 'have just the\n' - '"__get__()" method. Data descriptors with "__get__()" ' - 'and "__set__()"\n' - '(and/or "__delete__()") defined always override a ' - 'redefinition in an\n' - 'instance dictionary. In contrast, non-data descriptors ' - 'can be\n' - 'overridden by instances.\n' - '\n' - 'Python methods (including those decorated with ' - '"@staticmethod" and\n' - '"@classmethod") are implemented as non-data ' - 'descriptors. Accordingly,\n' - 'instances can redefine and override methods. This ' - 'allows individual\n' - 'instances to acquire behaviors that differ from other ' - 'instances of the\n' - 'same class.\n' - '\n' - 'The "property()" function is implemented as a data ' - 'descriptor.\n' - 'Accordingly, instances cannot override the behavior of a ' - 'property.\n' - '\n' - '\n' - '__slots__\n' - '=========\n' - '\n' - '*__slots__* allow us to explicitly declare data members ' - '(like\n' - 'properties) and deny the creation of "__dict__" and ' - '*__weakref__*\n' - '(unless explicitly declared in *__slots__* or available ' - 'in a parent.)\n' - '\n' - 'The space saved over using "__dict__" can be ' - 'significant. Attribute\n' - 'lookup speed can be significantly improved as well.\n' - '\n' - 'object.__slots__\n' - '\n' - ' This class variable can be assigned a string, ' - 'iterable, or sequence\n' - ' of strings with variable names used by instances. ' - '*__slots__*\n' - ' reserves space for the declared variables and ' - 'prevents the\n' - ' automatic creation of "__dict__" and *__weakref__* ' - 'for each\n' - ' instance.\n' - '\n' - 'Notes on using *__slots__*:\n' - '\n' - '* When inheriting from a class without *__slots__*, the ' - '"__dict__" and\n' - ' *__weakref__* attribute of the instances will always ' - 'be accessible.\n' - '\n' - '* Without a "__dict__" variable, instances cannot be ' - 'assigned new\n' - ' variables not listed in the *__slots__* definition. ' - 'Attempts to\n' - ' assign to an unlisted variable name raises ' - '"AttributeError". If\n' - ' dynamic assignment of new variables is desired, then ' - 'add\n' - ' "\'__dict__\'" to the sequence of strings in the ' - '*__slots__*\n' - ' declaration.\n' - '\n' - '* Without a *__weakref__* variable for each instance, ' - 'classes defining\n' - ' *__slots__* do not support "weak references" to its ' - 'instances. If\n' - ' weak reference support is needed, then add ' - '"\'__weakref__\'" to the\n' - ' sequence of strings in the *__slots__* declaration.\n' - '\n' - '* *__slots__* are implemented at the class level by ' - 'creating\n' - ' descriptors for each variable name. As a result, ' - 'class attributes\n' - ' cannot be used to set default values for instance ' - 'variables defined\n' - ' by *__slots__*; otherwise, the class attribute would ' - 'overwrite the\n' - ' descriptor assignment.\n' - '\n' - '* The action of a *__slots__* declaration is not limited ' - 'to the class\n' - ' where it is defined. *__slots__* declared in parents ' - 'are available\n' - ' in child classes. 
However, instances of a child ' - 'subclass will get a\n' - ' "__dict__" and *__weakref__* unless the subclass also ' - 'defines\n' - ' *__slots__* (which should only contain names of any ' - '*additional*\n' - ' slots).\n' - '\n' - '* If a class defines a slot also defined in a base ' - 'class, the instance\n' - ' variable defined by the base class slot is ' - 'inaccessible (except by\n' - ' retrieving its descriptor directly from the base ' - 'class). This\n' - ' renders the meaning of the program undefined. In the ' - 'future, a\n' - ' check may be added to prevent this.\n' - '\n' - '* "TypeError" will be raised if nonempty *__slots__* are ' - 'defined for a\n' - ' class derived from a ""variable-length" built-in type" ' - 'such as\n' - ' "int", "bytes", and "tuple".\n' - '\n' - '* Any non-string *iterable* may be assigned to ' - '*__slots__*.\n' - '\n' - '* If a "dictionary" is used to assign *__slots__*, the ' - 'dictionary keys\n' - ' will be used as the slot names. The values of the ' - 'dictionary can be\n' - ' used to provide per-attribute docstrings that will be ' - 'recognised by\n' - ' "inspect.getdoc()" and displayed in the output of ' - '"help()".\n' - '\n' - '* "__class__" assignment works only if both classes have ' - 'the same\n' - ' *__slots__*.\n' - '\n' - '* Multiple inheritance with multiple slotted parent ' - 'classes can be\n' - ' used, but only one parent is allowed to have ' - 'attributes created by\n' - ' slots (the other bases must have empty slot layouts) - ' - 'violations\n' - ' raise "TypeError".\n' - '\n' - '* If an *iterator* is used for *__slots__* then a ' - '*descriptor* is\n' - ' created for each of the iterator’s values. However, ' - 'the *__slots__*\n' - ' attribute will be an empty iterator.\n', - 'attribute-references': 'Attribute references\n' - '********************\n' - '\n' - 'An attribute reference is a primary followed by a ' - 'period and a name:\n' - '\n' - ' attributeref ::= primary "." identifier\n' - '\n' - 'The primary must evaluate to an object of a type ' - 'that supports\n' - 'attribute references, which most objects do. This ' - 'object is then\n' - 'asked to produce the attribute whose name is the ' - 'identifier. The type\n' - 'and value produced is determined by the object. ' - 'Multiple evaluations\n' - 'of the same attribute reference may yield different ' - 'objects.\n' - '\n' - 'This production can be customized by overriding the\n' - '"__getattribute__()" method or the "__getattr__()" ' - 'method. 
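A short demonstration of the __slots__ behaviour summarised in the notes above (the Point class is illustrative only):

    class Point:
        __slots__ = ('x', 'y')
        def __init__(self, x, y):
            self.x, self.y = x, y

    p = Point(1, 2)
    print(hasattr(p, '__dict__'))   # False - no per-instance dict is created
    try:
        p.z = 3
    except AttributeError as exc:
        print(exc)                  # assigning a name not listed in __slots__ fails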
The\n' - '"__getattribute__()" method is called first and ' - 'either returns a value\n' - 'or raises "AttributeError" if the attribute is not ' - 'available.\n' - '\n' - 'If an "AttributeError" is raised and the object has ' - 'a "__getattr__()"\n' - 'method, that method is called as a fallback.\n', - 'augassign': 'Augmented assignment statements\n' - '*******************************\n' - '\n' - 'Augmented assignment is the combination, in a single statement, ' - 'of a\n' - 'binary operation and an assignment statement:\n' - '\n' - ' augmented_assignment_stmt ::= augtarget augop ' - '(expression_list | yield_expression)\n' - ' augtarget ::= identifier | attributeref | ' - 'subscription | slicing\n' - ' augop ::= "+=" | "-=" | "*=" | "@=" | ' - '"/=" | "//=" | "%=" | "**="\n' - ' | ">>=" | "<<=" | "&=" | "^=" | "|="\n' - '\n' - '(See section Primaries for the syntax definitions of the last ' - 'three\n' - 'symbols.)\n' - '\n' - 'An augmented assignment evaluates the target (which, unlike ' - 'normal\n' - 'assignment statements, cannot be an unpacking) and the ' - 'expression\n' - 'list, performs the binary operation specific to the type of ' - 'assignment\n' - 'on the two operands, and assigns the result to the original ' - 'target.\n' - 'The target is only evaluated once.\n' - '\n' - 'An augmented assignment statement like "x += 1" can be ' - 'rewritten as "x\n' - '= x + 1" to achieve a similar, but not exactly equal effect. In ' - 'the\n' - 'augmented version, "x" is only evaluated once. Also, when ' - 'possible,\n' - 'the actual operation is performed *in-place*, meaning that ' - 'rather than\n' - 'creating a new object and assigning that to the target, the old ' - 'object\n' - 'is modified instead.\n' - '\n' - 'Unlike normal assignments, augmented assignments evaluate the ' - 'left-\n' - 'hand side *before* evaluating the right-hand side. For ' - 'example, "a[i]\n' - '+= f(x)" first looks-up "a[i]", then it evaluates "f(x)" and ' - 'performs\n' - 'the addition, and lastly, it writes the result back to "a[i]".\n' - '\n' - 'With the exception of assigning to tuples and multiple targets ' - 'in a\n' - 'single statement, the assignment done by augmented assignment\n' - 'statements is handled the same way as normal assignments. ' - 'Similarly,\n' - 'with the exception of the possible *in-place* behavior, the ' - 'binary\n' - 'operation performed by augmented assignment is the same as the ' - 'normal\n' - 'binary operations.\n' - '\n' - 'For targets which are attribute references, the same caveat ' - 'about\n' - 'class and instance attributes applies as for regular ' - 'assignments.\n', - 'await': 'Await expression\n' - '****************\n' - '\n' - 'Suspend the execution of *coroutine* on an *awaitable* object. Can\n' - 'only be used inside a *coroutine function*.\n' - '\n' - ' await_expr ::= "await" primary\n' - '\n' - 'Added in version 3.5.\n', - 'binary': 'Binary arithmetic operations\n' - '****************************\n' - '\n' - 'The binary arithmetic operations have the conventional priority\n' - 'levels. Note that some of these operations also apply to certain ' - 'non-\n' - 'numeric types. 
Apart from the power operator, there are only two\n' - 'levels, one for multiplicative operators and one for additive\n' - 'operators:\n' - '\n' - ' m_expr ::= u_expr | m_expr "*" u_expr | m_expr "@" m_expr |\n' - ' m_expr "//" u_expr | m_expr "/" u_expr |\n' - ' m_expr "%" u_expr\n' - ' a_expr ::= m_expr | a_expr "+" m_expr | a_expr "-" m_expr\n' - '\n' - 'The "*" (multiplication) operator yields the product of its ' - 'arguments.\n' - 'The arguments must either both be numbers, or one argument must be ' - 'an\n' - 'integer and the other must be a sequence. In the former case, the\n' - 'numbers are converted to a common real type and then multiplied\n' - 'together. In the latter case, sequence repetition is performed; ' - 'a\n' - 'negative repetition factor yields an empty sequence.\n' - '\n' - 'This operation can be customized using the special "__mul__()" ' - 'and\n' - '"__rmul__()" methods.\n' - '\n' - 'Changed in version 3.14: If only one operand is a complex number, ' - 'the\n' - 'other operand is converted to a floating-point number.\n' - '\n' - 'The "@" (at) operator is intended to be used for matrix\n' - 'multiplication. No builtin Python types implement this operator.\n' - '\n' - 'This operation can be customized using the special "__matmul__()" ' - 'and\n' - '"__rmatmul__()" methods.\n' - '\n' - 'Added in version 3.5.\n' - '\n' - 'The "/" (division) and "//" (floor division) operators yield the\n' - 'quotient of their arguments. The numeric arguments are first\n' - 'converted to a common type. Division of integers yields a float, ' - 'while\n' - 'floor division of integers results in an integer; the result is ' - 'that\n' - 'of mathematical division with the ‘floor’ function applied to the\n' - 'result. Division by zero raises the "ZeroDivisionError" ' - 'exception.\n' - '\n' - 'The division operation can be customized using the special\n' - '"__truediv__()" and "__rtruediv__()" methods. The floor division\n' - 'operation can be customized using the special "__floordiv__()" ' - 'and\n' - '"__rfloordiv__()" methods.\n' - '\n' - 'The "%" (modulo) operator yields the remainder from the division ' - 'of\n' - 'the first argument by the second. The numeric arguments are ' - 'first\n' - 'converted to a common type. A zero right argument raises the\n' - '"ZeroDivisionError" exception. The arguments may be ' - 'floating-point\n' - 'numbers, e.g., "3.14%0.7" equals "0.34" (since "3.14" equals ' - '"4*0.7 +\n' - '0.34".) The modulo operator always yields a result with the same ' - 'sign\n' - 'as its second operand (or zero); the absolute value of the result ' - 'is\n' - 'strictly smaller than the absolute value of the second operand ' - '[1].\n' - '\n' - 'The floor division and modulo operators are connected by the ' - 'following\n' - 'identity: "x == (x//y)*y + (x%y)". Floor division and modulo are ' - 'also\n' - 'connected with the built-in function "divmod()": "divmod(x, y) ==\n' - '(x//y, x%y)". [2].\n' - '\n' - 'In addition to performing the modulo operation on numbers, the ' - '"%"\n' - 'operator is also overloaded by string objects to perform ' - 'old-style\n' - 'string formatting (also known as interpolation). 
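Worked numbers for the modulo sign rule and the floor-division/modulo identity quoted above (illustrative):

    print(7 // -3, 7 % -3)    # -3 -2 - the remainder takes the sign of the second operand
    print(-7 // 3, -7 % 3)    # -3 2
    x, y = 17, 5
    print(x == (x // y) * y + x % y)        # True
    print(divmod(x, y) == (x // y, x % y))  # True
    print(3.14 % 0.7)         # roughly 0.34, subject to floating-point rounding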
The syntax for\n' - 'string formatting is described in the Python Library Reference,\n' - 'section printf-style String Formatting.\n' - '\n' - 'The *modulo* operation can be customized using the special ' - '"__mod__()"\n' - 'and "__rmod__()" methods.\n' - '\n' - 'The floor division operator, the modulo operator, and the ' - '"divmod()"\n' - 'function are not defined for complex numbers. Instead, convert to ' - 'a\n' - 'floating-point number using the "abs()" function if appropriate.\n' - '\n' - 'The "+" (addition) operator yields the sum of its arguments. The\n' - 'arguments must either both be numbers or both be sequences of the ' - 'same\n' - 'type. In the former case, the numbers are converted to a common ' - 'real\n' - 'type and then added together. In the latter case, the sequences ' - 'are\n' - 'concatenated.\n' - '\n' - 'This operation can be customized using the special "__add__()" ' - 'and\n' - '"__radd__()" methods.\n' - '\n' - 'Changed in version 3.14: If only one operand is a complex number, ' - 'the\n' - 'other operand is converted to a floating-point number.\n' - '\n' - 'The "-" (subtraction) operator yields the difference of its ' - 'arguments.\n' - 'The numeric arguments are first converted to a common real type.\n' - '\n' - 'This operation can be customized using the special "__sub__()" ' - 'and\n' - '"__rsub__()" methods.\n' - '\n' - 'Changed in version 3.14: If only one operand is a complex number, ' - 'the\n' - 'other operand is converted to a floating-point number.\n', - 'bitwise': 'Binary bitwise operations\n' - '*************************\n' - '\n' - 'Each of the three bitwise operations has a different priority ' - 'level:\n' - '\n' - ' and_expr ::= shift_expr | and_expr "&" shift_expr\n' - ' xor_expr ::= and_expr | xor_expr "^" and_expr\n' - ' or_expr ::= xor_expr | or_expr "|" xor_expr\n' - '\n' - 'The "&" operator yields the bitwise AND of its arguments, which ' - 'must\n' - 'be integers or one of them must be a custom object overriding\n' - '"__and__()" or "__rand__()" special methods.\n' - '\n' - 'The "^" operator yields the bitwise XOR (exclusive OR) of its\n' - 'arguments, which must be integers or one of them must be a ' - 'custom\n' - 'object overriding "__xor__()" or "__rxor__()" special methods.\n' - '\n' - 'The "|" operator yields the bitwise (inclusive) OR of its ' - 'arguments,\n' - 'which must be integers or one of them must be a custom object\n' - 'overriding "__or__()" or "__ror__()" special methods.\n', - 'bltin-code-objects': 'Code Objects\n' - '************\n' - '\n' - 'Code objects are used by the implementation to ' - 'represent “pseudo-\n' - 'compiled†executable Python code such as a function ' - 'body. They differ\n' - 'from function objects because they don’t contain a ' - 'reference to their\n' - 'global execution environment. Code objects are ' - 'returned by the built-\n' - 'in "compile()" function and can be extracted from ' - 'function objects\n' - 'through their "__code__" attribute. See also the ' - '"code" module.\n' - '\n' - 'Accessing "__code__" raises an auditing event ' - '"object.__getattr__"\n' - 'with arguments "obj" and ""__code__"".\n' - '\n' - 'A code object can be executed or evaluated by passing ' - 'it (instead of a\n' - 'source string) to the "exec()" or "eval()" built-in ' - 'functions.\n' - '\n' - 'See The standard type hierarchy for more ' - 'information.\n', - 'bltin-ellipsis-object': 'The Ellipsis Object\n' - '*******************\n' - '\n' - 'This object is commonly used by slicing (see ' - 'Slicings). 
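The code-object round trip described above, as a brief check (illustrative only):

    code = compile('result = 6 * 7', '<demo>', 'exec')
    namespace = {}
    exec(code, namespace)
    print(namespace['result'])        # 42

    def f(): pass
    print(type(f.__code__).__name__)  # code - the same kind of object compile() returns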
It supports\n' - 'no special operations. There is exactly one ' - 'ellipsis object, named\n' - '"Ellipsis" (a built-in name). "type(Ellipsis)()" ' - 'produces the\n' - '"Ellipsis" singleton.\n' - '\n' - 'It is written as "Ellipsis" or "...".\n', - 'bltin-null-object': 'The Null Object\n' - '***************\n' - '\n' - 'This object is returned by functions that don’t ' - 'explicitly return a\n' - 'value. It supports no special operations. There is ' - 'exactly one null\n' - 'object, named "None" (a built-in name). "type(None)()" ' - 'produces the\n' - 'same singleton.\n' - '\n' - 'It is written as "None".\n', - 'bltin-type-objects': 'Type Objects\n' - '************\n' - '\n' - 'Type objects represent the various object types. An ' - 'object’s type is\n' - 'accessed by the built-in function "type()". There are ' - 'no special\n' - 'operations on types. The standard module "types" ' - 'defines names for\n' - 'all standard built-in types.\n' - '\n' - 'Types are written like this: "".\n', - 'booleans': 'Boolean operations\n' - '******************\n' - '\n' - ' or_test ::= and_test | or_test "or" and_test\n' - ' and_test ::= not_test | and_test "and" not_test\n' - ' not_test ::= comparison | "not" not_test\n' - '\n' - 'In the context of Boolean operations, and also when expressions ' - 'are\n' - 'used by control flow statements, the following values are ' - 'interpreted\n' - 'as false: "False", "None", numeric zero of all types, and empty\n' - 'strings and containers (including strings, tuples, lists,\n' - 'dictionaries, sets and frozensets). All other values are ' - 'interpreted\n' - 'as true. User-defined objects can customize their truth value ' - 'by\n' - 'providing a "__bool__()" method.\n' - '\n' - 'The operator "not" yields "True" if its argument is false, ' - '"False"\n' - 'otherwise.\n' - '\n' - 'The expression "x and y" first evaluates *x*; if *x* is false, ' - 'its\n' - 'value is returned; otherwise, *y* is evaluated and the resulting ' - 'value\n' - 'is returned.\n' - '\n' - 'The expression "x or y" first evaluates *x*; if *x* is true, its ' - 'value\n' - 'is returned; otherwise, *y* is evaluated and the resulting value ' - 'is\n' - 'returned.\n' - '\n' - 'Note that neither "and" nor "or" restrict the value and type ' - 'they\n' - 'return to "False" and "True", but rather return the last ' - 'evaluated\n' - 'argument. This is sometimes useful, e.g., if "s" is a string ' - 'that\n' - 'should be replaced by a default value if it is empty, the ' - 'expression\n' - '"s or \'foo\'" yields the desired value. 
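The short-circuit behaviour described above in a few lines (illustrative): "and" and "or" return the last operand evaluated, while "not" always returns a bool.

    print('' or 'fallback')    # fallback - or returns the last operand it evaluated
    print(0 and 'never')       # 0        - and stops at the first false value
    print('x' and 'y')         # y
    print(not 'foo')           # False    - not always constructs a bool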
Because "not" has to ' - 'create a\n' - 'new value, it returns a boolean value regardless of the type of ' - 'its\n' - 'argument (for example, "not \'foo\'" produces "False" rather ' - 'than "\'\'".)\n', - 'break': 'The "break" statement\n' - '*********************\n' - '\n' - ' break_stmt ::= "break"\n' - '\n' - '"break" may only occur syntactically nested in a "for" or "while"\n' - 'loop, but not nested in a function or class definition within that\n' - 'loop.\n' - '\n' - 'It terminates the nearest enclosing loop, skipping the optional ' - '"else"\n' - 'clause if the loop has one.\n' - '\n' - 'If a "for" loop is terminated by "break", the loop control target\n' - 'keeps its current value.\n' - '\n' - 'When "break" passes control out of a "try" statement with a ' - '"finally"\n' - 'clause, that "finally" clause is executed before really leaving ' - 'the\n' - 'loop.\n', - 'callable-types': 'Emulating callable objects\n' - '**************************\n' - '\n' - 'object.__call__(self[, args...])\n' - '\n' - ' Called when the instance is “called†as a function; if ' - 'this method\n' - ' is defined, "x(arg1, arg2, ...)" roughly translates to\n' - ' "type(x).__call__(x, arg1, ...)". The "object" class ' - 'itself does\n' - ' not provide this method.\n', - 'calls': 'Calls\n' - '*****\n' - '\n' - 'A call calls a callable object (e.g., a *function*) with a ' - 'possibly\n' - 'empty series of *arguments*:\n' - '\n' - ' call ::= primary "(" [argument_list [","] | ' - 'comprehension] ")"\n' - ' argument_list ::= positional_arguments ["," ' - 'starred_and_keywords]\n' - ' ["," keywords_arguments]\n' - ' | starred_and_keywords ["," ' - 'keywords_arguments]\n' - ' | keywords_arguments\n' - ' positional_arguments ::= positional_item ("," positional_item)*\n' - ' positional_item ::= assignment_expression | "*" expression\n' - ' starred_and_keywords ::= ("*" expression | keyword_item)\n' - ' ("," "*" expression | "," ' - 'keyword_item)*\n' - ' keywords_arguments ::= (keyword_item | "**" expression)\n' - ' ("," keyword_item | "," "**" ' - 'expression)*\n' - ' keyword_item ::= identifier "=" expression\n' - '\n' - 'An optional trailing comma may be present after the positional and\n' - 'keyword arguments but does not affect the semantics.\n' - '\n' - 'The primary must evaluate to a callable object (user-defined\n' - 'functions, built-in functions, methods of built-in objects, class\n' - 'objects, methods of class instances, and all objects having a\n' - '"__call__()" method are callable). All argument expressions are\n' - 'evaluated before the call is attempted. Please refer to section\n' - 'Function definitions for the syntax of formal *parameter* lists.\n' - '\n' - 'If keyword arguments are present, they are first converted to\n' - 'positional arguments, as follows. First, a list of unfilled slots ' - 'is\n' - 'created for the formal parameters. If there are N positional\n' - 'arguments, they are placed in the first N slots. Next, for each\n' - 'keyword argument, the identifier is used to determine the\n' - 'corresponding slot (if the identifier is the same as the first ' - 'formal\n' - 'parameter name, the first slot is used, and so on). If the slot ' - 'is\n' - 'already filled, a "TypeError" exception is raised. Otherwise, the\n' - 'argument is placed in the slot, filling it (even if the expression ' - 'is\n' - '"None", it fills the slot). 
When all arguments have been ' - 'processed,\n' - 'the slots that are still unfilled are filled with the ' - 'corresponding\n' - 'default value from the function definition. (Default values are\n' - 'calculated, once, when the function is defined; thus, a mutable ' - 'object\n' - 'such as a list or dictionary used as default value will be shared ' - 'by\n' - 'all calls that don’t specify an argument value for the ' - 'corresponding\n' - 'slot; this should usually be avoided.) If there are any unfilled\n' - 'slots for which no default value is specified, a "TypeError" ' - 'exception\n' - 'is raised. Otherwise, the list of filled slots is used as the\n' - 'argument list for the call.\n' - '\n' - '**CPython implementation detail:** An implementation may provide\n' - 'built-in functions whose positional parameters do not have names, ' - 'even\n' - 'if they are ‘named’ for the purpose of documentation, and which\n' - 'therefore cannot be supplied by keyword. In CPython, this is the ' - 'case\n' - 'for functions implemented in C that use "PyArg_ParseTuple()" to ' - 'parse\n' - 'their arguments.\n' - '\n' - 'If there are more positional arguments than there are formal ' - 'parameter\n' - 'slots, a "TypeError" exception is raised, unless a formal ' - 'parameter\n' - 'using the syntax "*identifier" is present; in this case, that ' - 'formal\n' - 'parameter receives a tuple containing the excess positional ' - 'arguments\n' - '(or an empty tuple if there were no excess positional arguments).\n' - '\n' - 'If any keyword argument does not correspond to a formal parameter\n' - 'name, a "TypeError" exception is raised, unless a formal parameter\n' - 'using the syntax "**identifier" is present; in this case, that ' - 'formal\n' - 'parameter receives a dictionary containing the excess keyword\n' - 'arguments (using the keywords as keys and the argument values as\n' - 'corresponding values), or a (new) empty dictionary if there were ' - 'no\n' - 'excess keyword arguments.\n' - '\n' - 'If the syntax "*expression" appears in the function call, ' - '"expression"\n' - 'must evaluate to an *iterable*. Elements from these iterables are\n' - 'treated as if they were additional positional arguments. For the ' - 'call\n' - '"f(x1, x2, *y, x3, x4)", if *y* evaluates to a sequence *y1*, …, ' - '*yM*,\n' - 'this is equivalent to a call with M+4 positional arguments *x1*, ' - '*x2*,\n' - '*y1*, …, *yM*, *x3*, *x4*.\n' - '\n' - 'A consequence of this is that although the "*expression" syntax ' - 'may\n' - 'appear *after* explicit keyword arguments, it is processed ' - '*before*\n' - 'the keyword arguments (and any "**expression" arguments – see ' - 'below).\n' - 'So:\n' - '\n' - ' >>> def f(a, b):\n' - ' ... print(a, b)\n' - ' ...\n' - ' >>> f(b=1, *(2,))\n' - ' 2 1\n' - ' >>> f(a=1, *(2,))\n' - ' Traceback (most recent call last):\n' - ' File "", line 1, in \n' - " TypeError: f() got multiple values for keyword argument 'a'\n" - ' >>> f(1, *(2,))\n' - ' 1 2\n' - '\n' - 'It is unusual for both keyword arguments and the "*expression" ' - 'syntax\n' - 'to be used in the same call, so in practice this confusion does ' - 'not\n' - 'often arise.\n' - '\n' - 'If the syntax "**expression" appears in the function call,\n' - '"expression" must evaluate to a *mapping*, the contents of which ' - 'are\n' - 'treated as additional keyword arguments. 
If a parameter matching a ' - 'key\n' - 'has already been given a value (by an explicit keyword argument, ' - 'or\n' - 'from another unpacking), a "TypeError" exception is raised.\n' - '\n' - 'When "**expression" is used, each key in this mapping must be a\n' - 'string. Each value from the mapping is assigned to the first ' - 'formal\n' - 'parameter eligible for keyword assignment whose name is equal to ' - 'the\n' - 'key. A key need not be a Python identifier (e.g. ""max-temp °F"" ' - 'is\n' - 'acceptable, although it will not match any formal parameter that ' - 'could\n' - 'be declared). If there is no match to a formal parameter the ' - 'key-value\n' - 'pair is collected by the "**" parameter, if there is one, or if ' - 'there\n' - 'is not, a "TypeError" exception is raised.\n' - '\n' - 'Formal parameters using the syntax "*identifier" or "**identifier"\n' - 'cannot be used as positional argument slots or as keyword argument\n' - 'names.\n' - '\n' - 'Changed in version 3.5: Function calls accept any number of "*" ' - 'and\n' - '"**" unpackings, positional arguments may follow iterable ' - 'unpackings\n' - '("*"), and keyword arguments may follow dictionary unpackings ' - '("**").\n' - 'Originally proposed by **PEP 448**.\n' - '\n' - 'A call always returns some value, possibly "None", unless it raises ' - 'an\n' - 'exception. How this value is computed depends on the type of the\n' - 'callable object.\n' - '\n' - 'If it is—\n' - '\n' - 'a user-defined function:\n' - ' The code block for the function is executed, passing it the\n' - ' argument list. The first thing the code block will do is bind ' - 'the\n' - ' formal parameters to the arguments; this is described in ' - 'section\n' - ' Function definitions. When the code block executes a "return"\n' - ' statement, this specifies the return value of the function ' - 'call.\n' - ' If execution reaches the end of the code block without executing ' - 'a\n' - ' "return" statement, the return value is "None".\n' - '\n' - 'a built-in function or method:\n' - ' The result is up to the interpreter; see Built-in Functions for ' - 'the\n' - ' descriptions of built-in functions and methods.\n' - '\n' - 'a class object:\n' - ' A new instance of that class is returned.\n' - '\n' - 'a class instance method:\n' - ' The corresponding user-defined function is called, with an ' - 'argument\n' - ' list that is one longer than the argument list of the call: the\n' - ' instance becomes the first argument.\n' - '\n' - 'a class instance:\n' - ' The class must define a "__call__()" method; the effect is then ' - 'the\n' - ' same as if that method was called.\n', - 'class': 'Class definitions\n' - '*****************\n' - '\n' - 'A class definition defines a class object (see section The ' - 'standard\n' - 'type hierarchy):\n' - '\n' - ' classdef ::= [decorators] "class" classname [type_params] ' - '[inheritance] ":" suite\n' - ' inheritance ::= "(" [argument_list] ")"\n' - ' classname ::= identifier\n' - '\n' - 'A class definition is an executable statement. The inheritance ' - 'list\n' - 'usually gives a list of base classes (see Metaclasses for more\n' - 'advanced uses), so each item in the list should evaluate to a ' - 'class\n' - 'object which allows subclassing. 
Classes without an inheritance ' - 'list\n' - 'inherit, by default, from the base class "object"; hence,\n' - '\n' - ' class Foo:\n' - ' pass\n' - '\n' - 'is equivalent to\n' - '\n' - ' class Foo(object):\n' - ' pass\n' - '\n' - 'The class’s suite is then executed in a new execution frame (see\n' - 'Naming and binding), using a newly created local namespace and the\n' - 'original global namespace. (Usually, the suite contains mostly\n' - 'function definitions.) When the class’s suite finishes execution, ' - 'its\n' - 'execution frame is discarded but its local namespace is saved. [5] ' - 'A\n' - 'class object is then created using the inheritance list for the ' - 'base\n' - 'classes and the saved local namespace for the attribute ' - 'dictionary.\n' - 'The class name is bound to this class object in the original local\n' - 'namespace.\n' - '\n' - 'The order in which attributes are defined in the class body is\n' - 'preserved in the new class’s "__dict__". Note that this is ' - 'reliable\n' - 'only right after the class is created and only for classes that ' - 'were\n' - 'defined using the definition syntax.\n' - '\n' - 'Class creation can be customized heavily using metaclasses.\n' - '\n' - 'Classes can also be decorated: just like when decorating ' - 'functions,\n' - '\n' - ' @f1(arg)\n' - ' @f2\n' - ' class Foo: pass\n' - '\n' - 'is roughly equivalent to\n' - '\n' - ' class Foo: pass\n' - ' Foo = f1(arg)(f2(Foo))\n' - '\n' - 'The evaluation rules for the decorator expressions are the same as ' - 'for\n' - 'function decorators. The result is then bound to the class name.\n' - '\n' - 'Changed in version 3.9: Classes may be decorated with any valid\n' - '"assignment_expression". Previously, the grammar was much more\n' - 'restrictive; see **PEP 614** for details.\n' - '\n' - 'A list of type parameters may be given in square brackets ' - 'immediately\n' - 'after the class’s name. This indicates to static type checkers ' - 'that\n' - 'the class is generic. At runtime, the type parameters can be ' - 'retrieved\n' - 'from the class’s "__type_params__" attribute. See Generic classes ' - 'for\n' - 'more.\n' - '\n' - 'Changed in version 3.12: Type parameter lists are new in Python ' - '3.12.\n' - '\n' - '**Programmer’s note:** Variables defined in the class definition ' - 'are\n' - 'class attributes; they are shared by instances. Instance ' - 'attributes\n' - 'can be set in a method with "self.name = value". Both class and\n' - 'instance attributes are accessible through the notation ' - '“"self.name"â€,\n' - 'and an instance attribute hides a class attribute with the same ' - 'name\n' - 'when accessed in this way. Class attributes can be used as ' - 'defaults\n' - 'for instance attributes, but using mutable values there can lead ' - 'to\n' - 'unexpected results. Descriptors can be used to create instance\n' - 'variables with different implementation details.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 3115** - Metaclasses in Python 3000\n' - ' The proposal that changed the declaration of metaclasses to ' - 'the\n' - ' current syntax, and the semantics for how classes with\n' - ' metaclasses are constructed.\n' - '\n' - ' **PEP 3129** - Class Decorators\n' - ' The proposal that added class decorators. 
Function and ' - 'method\n' - ' decorators were introduced in **PEP 318**.\n', - 'comparisons': 'Comparisons\n' - '***********\n' - '\n' - 'Unlike C, all comparison operations in Python have the same ' - 'priority,\n' - 'which is lower than that of any arithmetic, shifting or ' - 'bitwise\n' - 'operation. Also unlike C, expressions like "a < b < c" have ' - 'the\n' - 'interpretation that is conventional in mathematics:\n' - '\n' - ' comparison ::= or_expr (comp_operator or_expr)*\n' - ' comp_operator ::= "<" | ">" | "==" | ">=" | "<=" | "!="\n' - ' | "is" ["not"] | ["not"] "in"\n' - '\n' - 'Comparisons yield boolean values: "True" or "False". Custom ' - '*rich\n' - 'comparison methods* may return non-boolean values. In this ' - 'case Python\n' - 'will call "bool()" on such value in boolean contexts.\n' - '\n' - 'Comparisons can be chained arbitrarily, e.g., "x < y <= z" ' - 'is\n' - 'equivalent to "x < y and y <= z", except that "y" is ' - 'evaluated only\n' - 'once (but in both cases "z" is not evaluated at all when "x < ' - 'y" is\n' - 'found to be false).\n' - '\n' - 'Formally, if *a*, *b*, *c*, …, *y*, *z* are expressions and ' - '*op1*,\n' - '*op2*, …, *opN* are comparison operators, then "a op1 b op2 c ' - '... y\n' - 'opN z" is equivalent to "a op1 b and b op2 c and ... y opN ' - 'z", except\n' - 'that each expression is evaluated at most once.\n' - '\n' - 'Note that "a op1 b op2 c" doesn’t imply any kind of ' - 'comparison between\n' - '*a* and *c*, so that, e.g., "x < y > z" is perfectly legal ' - '(though\n' - 'perhaps not pretty).\n' - '\n' - '\n' - 'Value comparisons\n' - '=================\n' - '\n' - 'The operators "<", ">", "==", ">=", "<=", and "!=" compare ' - 'the values\n' - 'of two objects. The objects do not need to have the same ' - 'type.\n' - '\n' - 'Chapter Objects, values and types states that objects have a ' - 'value (in\n' - 'addition to type and identity). The value of an object is a ' - 'rather\n' - 'abstract notion in Python: For example, there is no canonical ' - 'access\n' - 'method for an object’s value. Also, there is no requirement ' - 'that the\n' - 'value of an object should be constructed in a particular way, ' - 'e.g.\n' - 'comprised of all its data attributes. Comparison operators ' - 'implement a\n' - 'particular notion of what the value of an object is. One can ' - 'think of\n' - 'them as defining the value of an object indirectly, by means ' - 'of their\n' - 'comparison implementation.\n' - '\n' - 'Because all types are (direct or indirect) subtypes of ' - '"object", they\n' - 'inherit the default comparison behavior from "object". Types ' - 'can\n' - 'customize their comparison behavior by implementing *rich ' - 'comparison\n' - 'methods* like "__lt__()", described in Basic customization.\n' - '\n' - 'The default behavior for equality comparison ("==" and "!=") ' - 'is based\n' - 'on the identity of the objects. Hence, equality comparison ' - 'of\n' - 'instances with the same identity results in equality, and ' - 'equality\n' - 'comparison of instances with different identities results in\n' - 'inequality. A motivation for this default behavior is the ' - 'desire that\n' - 'all objects should be reflexive (i.e. "x is y" implies "x == ' - 'y").\n' - '\n' - 'A default order comparison ("<", ">", "<=", and ">=") is not ' - 'provided;\n' - 'an attempt raises "TypeError". 
A motivation for this default ' - 'behavior\n' - 'is the lack of a similar invariant as for equality.\n' - '\n' - 'The behavior of the default equality comparison, that ' - 'instances with\n' - 'different identities are always unequal, may be in contrast ' - 'to what\n' - 'types will need that have a sensible definition of object ' - 'value and\n' - 'value-based equality. Such types will need to customize ' - 'their\n' - 'comparison behavior, and in fact, a number of built-in types ' - 'have done\n' - 'that.\n' - '\n' - 'The following list describes the comparison behavior of the ' - 'most\n' - 'important built-in types.\n' - '\n' - '* Numbers of built-in numeric types (Numeric Types — int, ' - 'float,\n' - ' complex) and of the standard library types ' - '"fractions.Fraction" and\n' - ' "decimal.Decimal" can be compared within and across their ' - 'types,\n' - ' with the restriction that complex numbers do not support ' - 'order\n' - ' comparison. Within the limits of the types involved, they ' - 'compare\n' - ' mathematically (algorithmically) correct without loss of ' - 'precision.\n' - '\n' - ' The not-a-number values "float(\'NaN\')" and ' - '"decimal.Decimal(\'NaN\')"\n' - ' are special. Any ordered comparison of a number to a ' - 'not-a-number\n' - ' value is false. A counter-intuitive implication is that ' - 'not-a-number\n' - ' values are not equal to themselves. For example, if "x =\n' - ' float(\'NaN\')", "3 < x", "x < 3" and "x == x" are all ' - 'false, while "x\n' - ' != x" is true. This behavior is compliant with IEEE 754.\n' - '\n' - '* "None" and "NotImplemented" are singletons. **PEP 8** ' - 'advises that\n' - ' comparisons for singletons should always be done with "is" ' - 'or "is\n' - ' not", never the equality operators.\n' - '\n' - '* Binary sequences (instances of "bytes" or "bytearray") can ' - 'be\n' - ' compared within and across their types. They compare\n' - ' lexicographically using the numeric values of their ' - 'elements.\n' - '\n' - '* Strings (instances of "str") compare lexicographically ' - 'using the\n' - ' numerical Unicode code points (the result of the built-in ' - 'function\n' - ' "ord()") of their characters. [3]\n' - '\n' - ' Strings and binary sequences cannot be directly compared.\n' - '\n' - '* Sequences (instances of "tuple", "list", or "range") can be ' - 'compared\n' - ' only within each of their types, with the restriction that ' - 'ranges do\n' - ' not support order comparison. Equality comparison across ' - 'these\n' - ' types results in inequality, and ordering comparison across ' - 'these\n' - ' types raises "TypeError".\n' - '\n' - ' Sequences compare lexicographically using comparison of\n' - ' corresponding elements. The built-in containers typically ' - 'assume\n' - ' identical objects are equal to themselves. That lets them ' - 'bypass\n' - ' equality tests for identical objects to improve performance ' - 'and to\n' - ' maintain their internal invariants.\n' - '\n' - ' Lexicographical comparison between built-in collections ' - 'works as\n' - ' follows:\n' - '\n' - ' * For two collections to compare equal, they must be of the ' - 'same\n' - ' type, have the same length, and each pair of ' - 'corresponding\n' - ' elements must compare equal (for example, "[1,2] == ' - '(1,2)" is\n' - ' false because the type is not the same).\n' - '\n' - ' * Collections that support order comparison are ordered the ' - 'same as\n' - ' their first unequal elements (for example, "[1,2,x] <= ' - '[1,2,y]"\n' - ' has the same value as "x <= y"). 
If a corresponding ' - 'element does\n' - ' not exist, the shorter collection is ordered first (for ' - 'example,\n' - ' "[1,2] < [1,2,3]" is true).\n' - '\n' - '* Mappings (instances of "dict") compare equal if and only if ' - 'they\n' - ' have equal "(key, value)" pairs. Equality comparison of the ' - 'keys and\n' - ' values enforces reflexivity.\n' - '\n' - ' Order comparisons ("<", ">", "<=", and ">=") raise ' - '"TypeError".\n' - '\n' - '* Sets (instances of "set" or "frozenset") can be compared ' - 'within and\n' - ' across their types.\n' - '\n' - ' They define order comparison operators to mean subset and ' - 'superset\n' - ' tests. Those relations do not define total orderings (for ' - 'example,\n' - ' the two sets "{1,2}" and "{2,3}" are not equal, nor subsets ' - 'of one\n' - ' another, nor supersets of one another). Accordingly, sets ' - 'are not\n' - ' appropriate arguments for functions which depend on total ' - 'ordering\n' - ' (for example, "min()", "max()", and "sorted()" produce ' - 'undefined\n' - ' results given a list of sets as inputs).\n' - '\n' - ' Comparison of sets enforces reflexivity of its elements.\n' - '\n' - '* Most other built-in types have no comparison methods ' - 'implemented, so\n' - ' they inherit the default comparison behavior.\n' - '\n' - 'User-defined classes that customize their comparison behavior ' - 'should\n' - 'follow some consistency rules, if possible:\n' - '\n' - '* Equality comparison should be reflexive. In other words, ' - 'identical\n' - ' objects should compare equal:\n' - '\n' - ' "x is y" implies "x == y"\n' - '\n' - '* Comparison should be symmetric. In other words, the ' - 'following\n' - ' expressions should have the same result:\n' - '\n' - ' "x == y" and "y == x"\n' - '\n' - ' "x != y" and "y != x"\n' - '\n' - ' "x < y" and "y > x"\n' - '\n' - ' "x <= y" and "y >= x"\n' - '\n' - '* Comparison should be transitive. The following ' - '(non-exhaustive)\n' - ' examples illustrate that:\n' - '\n' - ' "x > y and y > z" implies "x > z"\n' - '\n' - ' "x < y and y <= z" implies "x < z"\n' - '\n' - '* Inverse comparison should result in the boolean negation. ' - 'In other\n' - ' words, the following expressions should have the same ' - 'result:\n' - '\n' - ' "x == y" and "not x != y"\n' - '\n' - ' "x < y" and "not x >= y" (for total ordering)\n' - '\n' - ' "x > y" and "not x <= y" (for total ordering)\n' - '\n' - ' The last two expressions apply to totally ordered ' - 'collections (e.g.\n' - ' to sequences, but not to sets or mappings). See also the\n' - ' "total_ordering()" decorator.\n' - '\n' - '* The "hash()" result should be consistent with equality. ' - 'Objects that\n' - ' are equal should either have the same hash value, or be ' - 'marked as\n' - ' unhashable.\n' - '\n' - 'Python does not enforce these consistency rules. In fact, ' - 'the\n' - 'not-a-number values are an example for not following these ' - 'rules.\n' - '\n' - '\n' - 'Membership test operations\n' - '==========================\n' - '\n' - 'The operators "in" and "not in" test for membership. "x in ' - 's"\n' - 'evaluates to "True" if *x* is a member of *s*, and "False" ' - 'otherwise.\n' - '"x not in s" returns the negation of "x in s". All built-in ' - 'sequences\n' - 'and set types support this as well as dictionary, for which ' - '"in" tests\n' - 'whether the dictionary has a given key. 
For container types ' - 'such as\n' - 'list, tuple, set, frozenset, dict, or collections.deque, the\n' - 'expression "x in y" is equivalent to "any(x is e or x == e ' - 'for e in\n' - 'y)".\n' - '\n' - 'For the string and bytes types, "x in y" is "True" if and ' - 'only if *x*\n' - 'is a substring of *y*. An equivalent test is "y.find(x) != ' - '-1".\n' - 'Empty strings are always considered to be a substring of any ' - 'other\n' - 'string, so """ in "abc"" will return "True".\n' - '\n' - 'For user-defined classes which define the "__contains__()" ' - 'method, "x\n' - 'in y" returns "True" if "y.__contains__(x)" returns a true ' - 'value, and\n' - '"False" otherwise.\n' - '\n' - 'For user-defined classes which do not define "__contains__()" ' - 'but do\n' - 'define "__iter__()", "x in y" is "True" if some value "z", ' - 'for which\n' - 'the expression "x is z or x == z" is true, is produced while ' - 'iterating\n' - 'over "y". If an exception is raised during the iteration, it ' - 'is as if\n' - '"in" raised that exception.\n' - '\n' - 'Lastly, the old-style iteration protocol is tried: if a class ' - 'defines\n' - '"__getitem__()", "x in y" is "True" if and only if there is a ' - 'non-\n' - 'negative integer index *i* such that "x is y[i] or x == ' - 'y[i]", and no\n' - 'lower integer index raises the "IndexError" exception. (If ' - 'any other\n' - 'exception is raised, it is as if "in" raised that ' - 'exception).\n' - '\n' - 'The operator "not in" is defined to have the inverse truth ' - 'value of\n' - '"in".\n' - '\n' - '\n' - 'Identity comparisons\n' - '====================\n' - '\n' - 'The operators "is" and "is not" test for an object’s ' - 'identity: "x is\n' - 'y" is true if and only if *x* and *y* are the same object. ' - 'An\n' - 'Object’s identity is determined using the "id()" function. ' - '"x is not\n' - 'y" yields the inverse truth value. [4]\n', - 'compound': 'Compound statements\n' - '*******************\n' - '\n' - 'Compound statements contain (groups of) other statements; they ' - 'affect\n' - 'or control the execution of those other statements in some way. ' - 'In\n' - 'general, compound statements span multiple lines, although in ' - 'simple\n' - 'incarnations a whole compound statement may be contained in one ' - 'line.\n' - '\n' - 'The "if", "while" and "for" statements implement traditional ' - 'control\n' - 'flow constructs. "try" specifies exception handlers and/or ' - 'cleanup\n' - 'code for a group of statements, while the "with" statement ' - 'allows the\n' - 'execution of initialization and finalization code around a block ' - 'of\n' - 'code. Function and class definitions are also syntactically ' - 'compound\n' - 'statements.\n' - '\n' - 'A compound statement consists of one or more ‘clauses.’ A ' - 'clause\n' - 'consists of a header and a ‘suite.’ The clause headers of a\n' - 'particular compound statement are all at the same indentation ' - 'level.\n' - 'Each clause header begins with a uniquely identifying keyword ' - 'and ends\n' - 'with a colon. A suite is a group of statements controlled by a\n' - 'clause. A suite can be one or more semicolon-separated simple\n' - 'statements on the same line as the header, following the ' - 'header’s\n' - 'colon, or it can be one or more indented statements on ' - 'subsequent\n' - 'lines. 
Only the latter form of a suite can contain nested ' - 'compound\n' - 'statements; the following is illegal, mostly because it wouldn’t ' - 'be\n' - 'clear to which "if" clause a following "else" clause would ' - 'belong:\n' - '\n' - ' if test1: if test2: print(x)\n' - '\n' - 'Also note that the semicolon binds tighter than the colon in ' - 'this\n' - 'context, so that in the following example, either all or none of ' - 'the\n' - '"print()" calls are executed:\n' - '\n' - ' if x < y < z: print(x); print(y); print(z)\n' - '\n' - 'Summarizing:\n' - '\n' - ' compound_stmt ::= if_stmt\n' - ' | while_stmt\n' - ' | for_stmt\n' - ' | try_stmt\n' - ' | with_stmt\n' - ' | match_stmt\n' - ' | funcdef\n' - ' | classdef\n' - ' | async_with_stmt\n' - ' | async_for_stmt\n' - ' | async_funcdef\n' - ' suite ::= stmt_list NEWLINE | NEWLINE INDENT ' - 'statement+ DEDENT\n' - ' statement ::= stmt_list NEWLINE | compound_stmt\n' - ' stmt_list ::= simple_stmt (";" simple_stmt)* [";"]\n' - '\n' - 'Note that statements always end in a "NEWLINE" possibly followed ' - 'by a\n' - '"DEDENT". Also note that optional continuation clauses always ' - 'begin\n' - 'with a keyword that cannot start a statement, thus there are no\n' - 'ambiguities (the ‘dangling "else"’ problem is solved in Python ' - 'by\n' - 'requiring nested "if" statements to be indented).\n' - '\n' - 'The formatting of the grammar rules in the following sections ' - 'places\n' - 'each clause on a separate line for clarity.\n' - '\n' - '\n' - 'The "if" statement\n' - '==================\n' - '\n' - 'The "if" statement is used for conditional execution:\n' - '\n' - ' if_stmt ::= "if" assignment_expression ":" suite\n' - ' ("elif" assignment_expression ":" suite)*\n' - ' ["else" ":" suite]\n' - '\n' - 'It selects exactly one of the suites by evaluating the ' - 'expressions one\n' - 'by one until one is found to be true (see section Boolean ' - 'operations\n' - 'for the definition of true and false); then that suite is ' - 'executed\n' - '(and no other part of the "if" statement is executed or ' - 'evaluated).\n' - 'If all expressions are false, the suite of the "else" clause, ' - 'if\n' - 'present, is executed.\n' - '\n' - '\n' - 'The "while" statement\n' - '=====================\n' - '\n' - 'The "while" statement is used for repeated execution as long as ' - 'an\n' - 'expression is true:\n' - '\n' - ' while_stmt ::= "while" assignment_expression ":" suite\n' - ' ["else" ":" suite]\n' - '\n' - 'This repeatedly tests the expression and, if it is true, ' - 'executes the\n' - 'first suite; if the expression is false (which may be the first ' - 'time\n' - 'it is tested) the suite of the "else" clause, if present, is ' - 'executed\n' - 'and the loop terminates.\n' - '\n' - 'A "break" statement executed in the first suite terminates the ' - 'loop\n' - 'without executing the "else" clause’s suite. A "continue" ' - 'statement\n' - 'executed in the first suite skips the rest of the suite and goes ' - 'back\n' - 'to testing the expression.\n' - '\n' - '\n' - 'The "for" statement\n' - '===================\n' - '\n' - 'The "for" statement is used to iterate over the elements of a ' - 'sequence\n' - '(such as a string, tuple or list) or other iterable object:\n' - '\n' - ' for_stmt ::= "for" target_list "in" starred_list ":" suite\n' - ' ["else" ":" suite]\n' - '\n' - 'The "starred_list" expression is evaluated once; it should yield ' - 'an\n' - '*iterable* object. An *iterator* is created for that iterable. 
' - 'The\n' - 'first item provided by the iterator is then assigned to the ' - 'target\n' - 'list using the standard rules for assignments (see Assignment\n' - 'statements), and the suite is executed. This repeats for each ' - 'item\n' - 'provided by the iterator. When the iterator is exhausted, the ' - 'suite\n' - 'in the "else" clause, if present, is executed, and the loop\n' - 'terminates.\n' - '\n' - 'A "break" statement executed in the first suite terminates the ' - 'loop\n' - 'without executing the "else" clause’s suite. A "continue" ' - 'statement\n' - 'executed in the first suite skips the rest of the suite and ' - 'continues\n' - 'with the next item, or with the "else" clause if there is no ' - 'next\n' - 'item.\n' - '\n' - 'The for-loop makes assignments to the variables in the target ' - 'list.\n' - 'This overwrites all previous assignments to those variables ' - 'including\n' - 'those made in the suite of the for-loop:\n' - '\n' - ' for i in range(10):\n' - ' print(i)\n' - ' i = 5 # this will not affect the for-loop\n' - ' # because i will be overwritten with ' - 'the next\n' - ' # index in the range\n' - '\n' - 'Names in the target list are not deleted when the loop is ' - 'finished,\n' - 'but if the sequence is empty, they will not have been assigned ' - 'to at\n' - 'all by the loop. Hint: the built-in type "range()" represents\n' - 'immutable arithmetic sequences of integers. For instance, ' - 'iterating\n' - '"range(3)" successively yields 0, 1, and then 2.\n' - '\n' - 'Changed in version 3.11: Starred elements are now allowed in ' - 'the\n' - 'expression list.\n' - '\n' - '\n' - 'The "try" statement\n' - '===================\n' - '\n' - 'The "try" statement specifies exception handlers and/or cleanup ' - 'code\n' - 'for a group of statements:\n' - '\n' - ' try_stmt ::= try1_stmt | try2_stmt | try3_stmt\n' - ' try1_stmt ::= "try" ":" suite\n' - ' ("except" [expression ["as" identifier]] ":" ' - 'suite)+\n' - ' ["else" ":" suite]\n' - ' ["finally" ":" suite]\n' - ' try2_stmt ::= "try" ":" suite\n' - ' ("except" "*" expression ["as" identifier] ":" ' - 'suite)+\n' - ' ["else" ":" suite]\n' - ' ["finally" ":" suite]\n' - ' try3_stmt ::= "try" ":" suite\n' - ' "finally" ":" suite\n' - '\n' - 'Additional information on exceptions can be found in section\n' - 'Exceptions, and information on using the "raise" statement to ' - 'generate\n' - 'exceptions may be found in section The raise statement.\n' - '\n' - '\n' - '"except" clause\n' - '---------------\n' - '\n' - 'The "except" clause(s) specify one or more exception handlers. ' - 'When no\n' - 'exception occurs in the "try" clause, no exception handler is\n' - 'executed. When an exception occurs in the "try" suite, a search ' - 'for an\n' - 'exception handler is started. This search inspects the "except"\n' - 'clauses in turn until one is found that matches the exception. ' - 'An\n' - 'expression-less "except" clause, if present, must be last; it ' - 'matches\n' - 'any exception.\n' - '\n' - 'For an "except" clause with an expression, the expression must\n' - 'evaluate to an exception type or a tuple of exception types. ' - 'The\n' - 'raised exception matches an "except" clause whose expression ' - 'evaluates\n' - 'to the class or a *non-virtual base class* of the exception ' - 'object, or\n' - 'to a tuple that contains such a class.\n' - '\n' - 'If no "except" clause matches the exception, the search for an\n' - 'exception handler continues in the surrounding code and on the\n' - 'invocation stack. 
[1]\n' - '\n' - 'If the evaluation of an expression in the header of an "except" ' - 'clause\n' - 'raises an exception, the original search for a handler is ' - 'canceled and\n' - 'a search starts for the new exception in the surrounding code ' - 'and on\n' - 'the call stack (it is treated as if the entire "try" statement ' - 'raised\n' - 'the exception).\n' - '\n' - 'When a matching "except" clause is found, the exception is ' - 'assigned to\n' - 'the target specified after the "as" keyword in that "except" ' - 'clause,\n' - 'if present, and the "except" clause’s suite is executed. All ' - '"except"\n' - 'clauses must have an executable block. When the end of this ' - 'block is\n' - 'reached, execution continues normally after the entire "try"\n' - 'statement. (This means that if two nested handlers exist for the ' - 'same\n' - 'exception, and the exception occurs in the "try" clause of the ' - 'inner\n' - 'handler, the outer handler will not handle the exception.)\n' - '\n' - 'When an exception has been assigned using "as target", it is ' - 'cleared\n' - 'at the end of the "except" clause. This is as if\n' - '\n' - ' except E as N:\n' - ' foo\n' - '\n' - 'was translated to\n' - '\n' - ' except E as N:\n' - ' try:\n' - ' foo\n' - ' finally:\n' - ' del N\n' - '\n' - 'This means the exception must be assigned to a different name to ' - 'be\n' - 'able to refer to it after the "except" clause. Exceptions are ' - 'cleared\n' - 'because with the traceback attached to them, they form a ' - 'reference\n' - 'cycle with the stack frame, keeping all locals in that frame ' - 'alive\n' - 'until the next garbage collection occurs.\n' - '\n' - 'Before an "except" clause’s suite is executed, the exception is ' - 'stored\n' - 'in the "sys" module, where it can be accessed from within the ' - 'body of\n' - 'the "except" clause by calling "sys.exception()". When leaving ' - 'an\n' - 'exception handler, the exception stored in the "sys" module is ' - 'reset\n' - 'to its previous value:\n' - '\n' - ' >>> print(sys.exception())\n' - ' None\n' - ' >>> try:\n' - ' ... raise TypeError\n' - ' ... except:\n' - ' ... print(repr(sys.exception()))\n' - ' ... try:\n' - ' ... raise ValueError\n' - ' ... except:\n' - ' ... print(repr(sys.exception()))\n' - ' ... print(repr(sys.exception()))\n' - ' ...\n' - ' TypeError()\n' - ' ValueError()\n' - ' TypeError()\n' - ' >>> print(sys.exception())\n' - ' None\n' - '\n' - '\n' - '"except*" clause\n' - '----------------\n' - '\n' - 'The "except*" clause(s) are used for handling "ExceptionGroup"s. ' - 'The\n' - 'exception type for matching is interpreted as in the case of ' - '"except",\n' - 'but in the case of exception groups we can have partial matches ' - 'when\n' - 'the type matches some of the exceptions in the group. This means ' - 'that\n' - 'multiple "except*" clauses can execute, each handling part of ' - 'the\n' - 'exception group. Each clause executes at most once and handles ' - 'an\n' - 'exception group of all matching exceptions. Each exception in ' - 'the\n' - 'group is handled by at most one "except*" clause, the first ' - 'that\n' - 'matches it.\n' - '\n' - ' >>> try:\n' - ' ... raise ExceptionGroup("eg",\n' - ' ... [ValueError(1), TypeError(2), OSError(3), ' - 'OSError(4)])\n' - ' ... except* TypeError as e:\n' - " ... print(f'caught {type(e)} with nested " - "{e.exceptions}')\n" - ' ... except* OSError as e:\n' - " ... 
print(f'caught {type(e)} with nested " - "{e.exceptions}')\n" - ' ...\n' - " caught with nested (TypeError(2),)\n" - " caught with nested (OSError(3), " - 'OSError(4))\n' - ' + Exception Group Traceback (most recent call last):\n' - ' | File "", line 2, in \n' - ' | ExceptionGroup: eg\n' - ' +-+---------------- 1 ----------------\n' - ' | ValueError: 1\n' - ' +------------------------------------\n' - '\n' - 'Any remaining exceptions that were not handled by any "except*" ' - 'clause\n' - 'are re-raised at the end, along with all exceptions that were ' - 'raised\n' - 'from within the "except*" clauses. If this list contains more ' - 'than one\n' - 'exception to reraise, they are combined into an exception ' - 'group.\n' - '\n' - 'If the raised exception is not an exception group and its type ' - 'matches\n' - 'one of the "except*" clauses, it is caught and wrapped by an ' - 'exception\n' - 'group with an empty message string.\n' - '\n' - ' >>> try:\n' - ' ... raise BlockingIOError\n' - ' ... except* BlockingIOError as e:\n' - ' ... print(repr(e))\n' - ' ...\n' - " ExceptionGroup('', (BlockingIOError()))\n" - '\n' - 'An "except*" clause must have a matching expression; it cannot ' - 'be\n' - '"except*:". Furthermore, this expression cannot contain ' - 'exception\n' - 'group types, because that would have ambiguous semantics.\n' - '\n' - 'It is not possible to mix "except" and "except*" in the same ' - '"try".\n' - '"break", "continue" and "return" cannot appear in an "except*" ' - 'clause.\n' - '\n' - '\n' - '"else" clause\n' - '-------------\n' - '\n' - 'The optional "else" clause is executed if the control flow ' - 'leaves the\n' - '"try" suite, no exception was raised, and no "return", ' - '"continue", or\n' - '"break" statement was executed. Exceptions in the "else" clause ' - 'are\n' - 'not handled by the preceding "except" clauses.\n' - '\n' - '\n' - '"finally" clause\n' - '----------------\n' - '\n' - 'If "finally" is present, it specifies a ‘cleanup’ handler. The ' - '"try"\n' - 'clause is executed, including any "except" and "else" clauses. ' - 'If an\n' - 'exception occurs in any of the clauses and is not handled, the\n' - 'exception is temporarily saved. The "finally" clause is ' - 'executed. If\n' - 'there is a saved exception it is re-raised at the end of the ' - '"finally"\n' - 'clause. If the "finally" clause raises another exception, the ' - 'saved\n' - 'exception is set as the context of the new exception. If the ' - '"finally"\n' - 'clause executes a "return", "break" or "continue" statement, the ' - 'saved\n' - 'exception is discarded:\n' - '\n' - ' >>> def f():\n' - ' ... try:\n' - ' ... 1/0\n' - ' ... finally:\n' - ' ... return 42\n' - ' ...\n' - ' >>> f()\n' - ' 42\n' - '\n' - 'The exception information is not available to the program ' - 'during\n' - 'execution of the "finally" clause.\n' - '\n' - 'When a "return", "break" or "continue" statement is executed in ' - 'the\n' - '"try" suite of a "try"…"finally" statement, the "finally" clause ' - 'is\n' - 'also executed ‘on the way out.’\n' - '\n' - 'The return value of a function is determined by the last ' - '"return"\n' - 'statement executed. Since the "finally" clause always executes, ' - 'a\n' - '"return" statement executed in the "finally" clause will always ' - 'be the\n' - 'last one executed:\n' - '\n' - ' >>> def foo():\n' - ' ... try:\n' - " ... return 'try'\n" - ' ... finally:\n' - " ... 
return 'finally'\n" - ' ...\n' - ' >>> foo()\n' - " 'finally'\n" - '\n' - 'Changed in version 3.8: Prior to Python 3.8, a "continue" ' - 'statement\n' - 'was illegal in the "finally" clause due to a problem with the\n' - 'implementation.\n' - '\n' - '\n' - 'The "with" statement\n' - '====================\n' - '\n' - 'The "with" statement is used to wrap the execution of a block ' - 'with\n' - 'methods defined by a context manager (see section With ' - 'Statement\n' - 'Context Managers). This allows common "try"…"except"…"finally" ' - 'usage\n' - 'patterns to be encapsulated for convenient reuse.\n' - '\n' - ' with_stmt ::= "with" ( "(" with_stmt_contents ","? ' - '")" | with_stmt_contents ) ":" suite\n' - ' with_stmt_contents ::= with_item ("," with_item)*\n' - ' with_item ::= expression ["as" target]\n' - '\n' - 'The execution of the "with" statement with one “item†proceeds ' - 'as\n' - 'follows:\n' - '\n' - '1. The context expression (the expression given in the ' - '"with_item") is\n' - ' evaluated to obtain a context manager.\n' - '\n' - '2. The context manager’s "__enter__()" is loaded for later use.\n' - '\n' - '3. The context manager’s "__exit__()" is loaded for later use.\n' - '\n' - '4. The context manager’s "__enter__()" method is invoked.\n' - '\n' - '5. If a target was included in the "with" statement, the return ' - 'value\n' - ' from "__enter__()" is assigned to it.\n' - '\n' - ' Note:\n' - '\n' - ' The "with" statement guarantees that if the "__enter__()" ' - 'method\n' - ' returns without an error, then "__exit__()" will always be\n' - ' called. Thus, if an error occurs during the assignment to ' - 'the\n' - ' target list, it will be treated the same as an error ' - 'occurring\n' - ' within the suite would be. See step 7 below.\n' - '\n' - '6. The suite is executed.\n' - '\n' - '7. The context manager’s "__exit__()" method is invoked. If an\n' - ' exception caused the suite to be exited, its type, value, ' - 'and\n' - ' traceback are passed as arguments to "__exit__()". Otherwise, ' - 'three\n' - ' "None" arguments are supplied.\n' - '\n' - ' If the suite was exited due to an exception, and the return ' - 'value\n' - ' from the "__exit__()" method was false, the exception is ' - 'reraised.\n' - ' If the return value was true, the exception is suppressed, ' - 'and\n' - ' execution continues with the statement following the "with"\n' - ' statement.\n' - '\n' - ' If the suite was exited for any reason other than an ' - 'exception, the\n' - ' return value from "__exit__()" is ignored, and execution ' - 'proceeds\n' - ' at the normal location for the kind of exit that was taken.\n' - '\n' - 'The following code:\n' - '\n' - ' with EXPRESSION as TARGET:\n' - ' SUITE\n' - '\n' - 'is semantically equivalent to:\n' - '\n' - ' manager = (EXPRESSION)\n' - ' enter = type(manager).__enter__\n' - ' exit = type(manager).__exit__\n' - ' value = enter(manager)\n' - '\n' - ' try:\n' - ' TARGET = value\n' - ' SUITE\n' - ' except:\n' - ' if not exit(manager, *sys.exc_info()):\n' - ' raise\n' - ' else:\n' - ' exit(manager, None, None, None)\n' - '\n' - 'With more than one item, the context managers are processed as ' - 'if\n' - 'multiple "with" statements were nested:\n' - '\n' - ' with A() as a, B() as b:\n' - ' SUITE\n' - '\n' - 'is semantically equivalent to:\n' - '\n' - ' with A() as a:\n' - ' with B() as b:\n' - ' SUITE\n' - '\n' - 'You can also write multi-item context managers in multiple lines ' - 'if\n' - 'the items are surrounded by parentheses. 
For example:\n' - '\n' - ' with (\n' - ' A() as a,\n' - ' B() as b,\n' - ' ):\n' - ' SUITE\n' - '\n' - 'Changed in version 3.1: Support for multiple context ' - 'expressions.\n' - '\n' - 'Changed in version 3.10: Support for using grouping parentheses ' - 'to\n' - 'break the statement in multiple lines.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 343** - The “with†statement\n' - ' The specification, background, and examples for the Python ' - '"with"\n' - ' statement.\n' - '\n' - '\n' - 'The "match" statement\n' - '=====================\n' - '\n' - 'Added in version 3.10.\n' - '\n' - 'The match statement is used for pattern matching. Syntax:\n' - '\n' - ' match_stmt ::= \'match\' subject_expr ":" NEWLINE INDENT ' - 'case_block+ DEDENT\n' - ' subject_expr ::= star_named_expression "," ' - 'star_named_expressions?\n' - ' | named_expression\n' - ' case_block ::= \'case\' patterns [guard] ":" block\n' - '\n' - 'Note:\n' - '\n' - ' This section uses single quotes to denote soft keywords.\n' - '\n' - 'Pattern matching takes a pattern as input (following "case") and ' - 'a\n' - 'subject value (following "match"). The pattern (which may ' - 'contain\n' - 'subpatterns) is matched against the subject value. The outcomes ' - 'are:\n' - '\n' - '* A match success or failure (also termed a pattern success or\n' - ' failure).\n' - '\n' - '* Possible binding of matched values to a name. The ' - 'prerequisites for\n' - ' this are further discussed below.\n' - '\n' - 'The "match" and "case" keywords are soft keywords.\n' - '\n' - 'See also:\n' - '\n' - ' * **PEP 634** – Structural Pattern Matching: Specification\n' - '\n' - ' * **PEP 636** – Structural Pattern Matching: Tutorial\n' - '\n' - '\n' - 'Overview\n' - '--------\n' - '\n' - 'Here’s an overview of the logical flow of a match statement:\n' - '\n' - '1. The subject expression "subject_expr" is evaluated and a ' - 'resulting\n' - ' subject value obtained. If the subject expression contains a ' - 'comma,\n' - ' a tuple is constructed using the standard rules.\n' - '\n' - '2. Each pattern in a "case_block" is attempted to match with ' - 'the\n' - ' subject value. The specific rules for success or failure are\n' - ' described below. The match attempt can also bind some or all ' - 'of the\n' - ' standalone names within the pattern. The precise pattern ' - 'binding\n' - ' rules vary per pattern type and are specified below. **Name\n' - ' bindings made during a successful pattern match outlive the\n' - ' executed block and can be used after the match statement**.\n' - '\n' - ' Note:\n' - '\n' - ' During failed pattern matches, some subpatterns may ' - 'succeed. Do\n' - ' not rely on bindings being made for a failed match. ' - 'Conversely,\n' - ' do not rely on variables remaining unchanged after a ' - 'failed\n' - ' match. The exact behavior is dependent on implementation ' - 'and may\n' - ' vary. This is an intentional decision made to allow ' - 'different\n' - ' implementations to add optimizations.\n' - '\n' - '3. If the pattern succeeds, the corresponding guard (if present) ' - 'is\n' - ' evaluated. 
In this case all name bindings are guaranteed to ' - 'have\n' - ' happened.\n' - '\n' - ' * If the guard evaluates as true or is missing, the "block" ' - 'inside\n' - ' "case_block" is executed.\n' - '\n' - ' * Otherwise, the next "case_block" is attempted as described ' - 'above.\n' - '\n' - ' * If there are no further case blocks, the match statement ' - 'is\n' - ' completed.\n' - '\n' - 'Note:\n' - '\n' - ' Users should generally never rely on a pattern being ' - 'evaluated.\n' - ' Depending on implementation, the interpreter may cache values ' - 'or use\n' - ' other optimizations which skip repeated evaluations.\n' - '\n' - 'A sample match statement:\n' - '\n' - ' >>> flag = False\n' - ' >>> match (100, 200):\n' - ' ... case (100, 300): # Mismatch: 200 != 300\n' - " ... print('Case 1')\n" - ' ... case (100, 200) if flag: # Successful match, but ' - 'guard fails\n' - " ... print('Case 2')\n" - ' ... case (100, y): # Matches and binds y to 200\n' - " ... print(f'Case 3, y: {y}')\n" - ' ... case _: # Pattern not attempted\n' - " ... print('Case 4, I match anything!')\n" - ' ...\n' - ' Case 3, y: 200\n' - '\n' - 'In this case, "if flag" is a guard. Read more about that in the ' - 'next\n' - 'section.\n' - '\n' - '\n' - 'Guards\n' - '------\n' - '\n' - ' guard ::= "if" named_expression\n' - '\n' - 'A "guard" (which is part of the "case") must succeed for code ' - 'inside\n' - 'the "case" block to execute. It takes the form: "if" followed ' - 'by an\n' - 'expression.\n' - '\n' - 'The logical flow of a "case" block with a "guard" follows:\n' - '\n' - '1. Check that the pattern in the "case" block succeeded. If ' - 'the\n' - ' pattern failed, the "guard" is not evaluated and the next ' - '"case"\n' - ' block is checked.\n' - '\n' - '2. If the pattern succeeded, evaluate the "guard".\n' - '\n' - ' * If the "guard" condition evaluates as true, the case block ' - 'is\n' - ' selected.\n' - '\n' - ' * If the "guard" condition evaluates as false, the case block ' - 'is\n' - ' not selected.\n' - '\n' - ' * If the "guard" raises an exception during evaluation, the\n' - ' exception bubbles up.\n' - '\n' - 'Guards are allowed to have side effects as they are ' - 'expressions.\n' - 'Guard evaluation must proceed from the first to the last case ' - 'block,\n' - 'one at a time, skipping case blocks whose pattern(s) don’t all\n' - 'succeed. (I.e., guard evaluation must happen in order.) Guard\n' - 'evaluation must stop once a case block is selected.\n' - '\n' - '\n' - 'Irrefutable Case Blocks\n' - '-----------------------\n' - '\n' - 'An irrefutable case block is a match-all case block. A match\n' - 'statement may have at most one irrefutable case block, and it ' - 'must be\n' - 'last.\n' - '\n' - 'A case block is considered irrefutable if it has no guard and ' - 'its\n' - 'pattern is irrefutable. A pattern is considered irrefutable if ' - 'we can\n' - 'prove from its syntax alone that it will always succeed. 
Only ' - 'the\n' - 'following patterns are irrefutable:\n' - '\n' - '* AS Patterns whose left-hand side is irrefutable\n' - '\n' - '* OR Patterns containing at least one irrefutable pattern\n' - '\n' - '* Capture Patterns\n' - '\n' - '* Wildcard Patterns\n' - '\n' - '* parenthesized irrefutable patterns\n' - '\n' - '\n' - 'Patterns\n' - '--------\n' - '\n' - 'Note:\n' - '\n' - ' This section uses grammar notations beyond standard EBNF:\n' - '\n' - ' * the notation "SEP.RULE+" is shorthand for "RULE (SEP ' - 'RULE)*"\n' - '\n' - ' * the notation "!RULE" is shorthand for a negative lookahead\n' - ' assertion\n' - '\n' - 'The top-level syntax for "patterns" is:\n' - '\n' - ' patterns ::= open_sequence_pattern | pattern\n' - ' pattern ::= as_pattern | or_pattern\n' - ' closed_pattern ::= | literal_pattern\n' - ' | capture_pattern\n' - ' | wildcard_pattern\n' - ' | value_pattern\n' - ' | group_pattern\n' - ' | sequence_pattern\n' - ' | mapping_pattern\n' - ' | class_pattern\n' - '\n' - 'The descriptions below will include a description “in simple ' - 'terms†of\n' - 'what a pattern does for illustration purposes (credits to ' - 'Raymond\n' - 'Hettinger for a document that inspired most of the ' - 'descriptions). Note\n' - 'that these descriptions are purely for illustration purposes and ' - '**may\n' - 'not** reflect the underlying implementation. Furthermore, they ' - 'do not\n' - 'cover all valid forms.\n' - '\n' - '\n' - 'OR Patterns\n' - '~~~~~~~~~~~\n' - '\n' - 'An OR pattern is two or more patterns separated by vertical bars ' - '"|".\n' - 'Syntax:\n' - '\n' - ' or_pattern ::= "|".closed_pattern+\n' - '\n' - 'Only the final subpattern may be irrefutable, and each ' - 'subpattern must\n' - 'bind the same set of names to avoid ambiguity.\n' - '\n' - 'An OR pattern matches each of its subpatterns in turn to the ' - 'subject\n' - 'value, until one succeeds. The OR pattern is then considered\n' - 'successful. Otherwise, if none of the subpatterns succeed, the ' - 'OR\n' - 'pattern fails.\n' - '\n' - 'In simple terms, "P1 | P2 | ..." will try to match "P1", if it ' - 'fails\n' - 'it will try to match "P2", succeeding immediately if any ' - 'succeeds,\n' - 'failing otherwise.\n' - '\n' - '\n' - 'AS Patterns\n' - '~~~~~~~~~~~\n' - '\n' - 'An AS pattern matches an OR pattern on the left of the "as" ' - 'keyword\n' - 'against a subject. Syntax:\n' - '\n' - ' as_pattern ::= or_pattern "as" capture_pattern\n' - '\n' - 'If the OR pattern fails, the AS pattern fails. Otherwise, the ' - 'AS\n' - 'pattern binds the subject to the name on the right of the as ' - 'keyword\n' - 'and succeeds. "capture_pattern" cannot be a "_".\n' - '\n' - 'In simple terms "P as NAME" will match with "P", and on success ' - 'it\n' - 'will set "NAME = ".\n' - '\n' - '\n' - 'Literal Patterns\n' - '~~~~~~~~~~~~~~~~\n' - '\n' - 'A literal pattern corresponds to most literals in Python. ' - 'Syntax:\n' - '\n' - ' literal_pattern ::= signed_number\n' - ' | signed_number "+" NUMBER\n' - ' | signed_number "-" NUMBER\n' - ' | strings\n' - ' | "None"\n' - ' | "True"\n' - ' | "False"\n' - ' signed_number ::= ["-"] NUMBER\n' - '\n' - 'The rule "strings" and the token "NUMBER" are defined in the ' - 'standard\n' - 'Python grammar. Triple-quoted strings are supported. Raw ' - 'strings and\n' - 'byte strings are supported. 
f-strings are not supported.\n' - '\n' - 'The forms "signed_number \'+\' NUMBER" and "signed_number \'-\' ' - 'NUMBER"\n' - 'are for expressing complex numbers; they require a real number ' - 'on the\n' - 'left and an imaginary number on the right. E.g. "3 + 4j".\n' - '\n' - 'In simple terms, "LITERAL" will succeed only if " ==\n' - 'LITERAL". For the singletons "None", "True" and "False", the ' - '"is"\n' - 'operator is used.\n' - '\n' - '\n' - 'Capture Patterns\n' - '~~~~~~~~~~~~~~~~\n' - '\n' - 'A capture pattern binds the subject value to a name. Syntax:\n' - '\n' - " capture_pattern ::= !'_' NAME\n" - '\n' - 'A single underscore "_" is not a capture pattern (this is what ' - '"!\'_\'"\n' - 'expresses). It is instead treated as a "wildcard_pattern".\n' - '\n' - 'In a given pattern, a given name can only be bound once. E.g. ' - '"case\n' - 'x, x: ..." is invalid while "case [x] | x: ..." is allowed.\n' - '\n' - 'Capture patterns always succeed. The binding follows scoping ' - 'rules\n' - 'established by the assignment expression operator in **PEP ' - '572**; the\n' - 'name becomes a local variable in the closest containing function ' - 'scope\n' - 'unless there’s an applicable "global" or "nonlocal" statement.\n' - '\n' - 'In simple terms "NAME" will always succeed and it will set "NAME ' - '=\n' - '".\n' - '\n' - '\n' - 'Wildcard Patterns\n' - '~~~~~~~~~~~~~~~~~\n' - '\n' - 'A wildcard pattern always succeeds (matches anything) and binds ' - 'no\n' - 'name. Syntax:\n' - '\n' - " wildcard_pattern ::= '_'\n" - '\n' - '"_" is a soft keyword within any pattern, but only within ' - 'patterns.\n' - 'It is an identifier, as usual, even within "match" subject\n' - 'expressions, "guard"s, and "case" blocks.\n' - '\n' - 'In simple terms, "_" will always succeed.\n' - '\n' - '\n' - 'Value Patterns\n' - '~~~~~~~~~~~~~~\n' - '\n' - 'A value pattern represents a named value in Python. Syntax:\n' - '\n' - ' value_pattern ::= attr\n' - ' attr ::= name_or_attr "." NAME\n' - ' name_or_attr ::= attr | NAME\n' - '\n' - 'The dotted name in the pattern is looked up using standard ' - 'Python name\n' - 'resolution rules. The pattern succeeds if the value found ' - 'compares\n' - 'equal to the subject value (using the "==" equality operator).\n' - '\n' - 'In simple terms "NAME1.NAME2" will succeed only if " ' - '==\n' - 'NAME1.NAME2"\n' - '\n' - 'Note:\n' - '\n' - ' If the same value occurs multiple times in the same match ' - 'statement,\n' - ' the interpreter may cache the first value found and reuse it ' - 'rather\n' - ' than repeat the same lookup. This cache is strictly tied to a ' - 'given\n' - ' execution of a given match statement.\n' - '\n' - '\n' - 'Group Patterns\n' - '~~~~~~~~~~~~~~\n' - '\n' - 'A group pattern allows users to add parentheses around patterns ' - 'to\n' - 'emphasize the intended grouping. Otherwise, it has no ' - 'additional\n' - 'syntax. Syntax:\n' - '\n' - ' group_pattern ::= "(" pattern ")"\n' - '\n' - 'In simple terms "(P)" has the same effect as "P".\n' - '\n' - '\n' - 'Sequence Patterns\n' - '~~~~~~~~~~~~~~~~~\n' - '\n' - 'A sequence pattern contains several subpatterns to be matched ' - 'against\n' - 'sequence elements. 
The syntax is similar to the unpacking of a ' - 'list or\n' - 'tuple.\n' - '\n' - ' sequence_pattern ::= "[" [maybe_sequence_pattern] "]"\n' - ' | "(" [open_sequence_pattern] ")"\n' - ' open_sequence_pattern ::= maybe_star_pattern "," ' - '[maybe_sequence_pattern]\n' - ' maybe_sequence_pattern ::= ",".maybe_star_pattern+ ","?\n' - ' maybe_star_pattern ::= star_pattern | pattern\n' - ' star_pattern ::= "*" (capture_pattern | ' - 'wildcard_pattern)\n' - '\n' - 'There is no difference if parentheses or square brackets are ' - 'used for\n' - 'sequence patterns (i.e. "(...)" vs "[...]" ).\n' - '\n' - 'Note:\n' - '\n' - ' A single pattern enclosed in parentheses without a trailing ' - 'comma\n' - ' (e.g. "(3 | 4)") is a group pattern. While a single pattern ' - 'enclosed\n' - ' in square brackets (e.g. "[3 | 4]") is still a sequence ' - 'pattern.\n' - '\n' - 'At most one star subpattern may be in a sequence pattern. The ' - 'star\n' - 'subpattern may occur in any position. If no star subpattern is\n' - 'present, the sequence pattern is a fixed-length sequence ' - 'pattern;\n' - 'otherwise it is a variable-length sequence pattern.\n' - '\n' - 'The following is the logical flow for matching a sequence ' - 'pattern\n' - 'against a subject value:\n' - '\n' - '1. If the subject value is not a sequence [2], the sequence ' - 'pattern\n' - ' fails.\n' - '\n' - '2. If the subject value is an instance of "str", "bytes" or\n' - ' "bytearray" the sequence pattern fails.\n' - '\n' - '3. The subsequent steps depend on whether the sequence pattern ' - 'is\n' - ' fixed or variable-length.\n' - '\n' - ' If the sequence pattern is fixed-length:\n' - '\n' - ' 1. If the length of the subject sequence is not equal to the ' - 'number\n' - ' of subpatterns, the sequence pattern fails\n' - '\n' - ' 2. Subpatterns in the sequence pattern are matched to their\n' - ' corresponding items in the subject sequence from left to ' - 'right.\n' - ' Matching stops as soon as a subpattern fails. If all\n' - ' subpatterns succeed in matching their corresponding item, ' - 'the\n' - ' sequence pattern succeeds.\n' - '\n' - ' Otherwise, if the sequence pattern is variable-length:\n' - '\n' - ' 1. If the length of the subject sequence is less than the ' - 'number of\n' - ' non-star subpatterns, the sequence pattern fails.\n' - '\n' - ' 2. The leading non-star subpatterns are matched to their\n' - ' corresponding items as for fixed-length sequences.\n' - '\n' - ' 3. If the previous step succeeds, the star subpattern matches ' - 'a\n' - ' list formed of the remaining subject items, excluding the\n' - ' remaining items corresponding to non-star subpatterns ' - 'following\n' - ' the star subpattern.\n' - '\n' - ' 4. Remaining non-star subpatterns are matched to their\n' - ' corresponding subject items, as for a fixed-length ' - 'sequence.\n' - '\n' - ' Note:\n' - '\n' - ' The length of the subject sequence is obtained via "len()" ' - '(i.e.\n' - ' via the "__len__()" protocol). 
This length may be cached ' - 'by the\n' - ' interpreter in a similar manner as value patterns.\n' - '\n' - 'In simple terms "[P1, P2, P3," … ", P]" matches only if all ' - 'the\n' - 'following happens:\n' - '\n' - '* check "" is a sequence\n' - '\n' - '* "len(subject) == "\n' - '\n' - '* "P1" matches "[0]" (note that this match can also ' - 'bind\n' - ' names)\n' - '\n' - '* "P2" matches "[1]" (note that this match can also ' - 'bind\n' - ' names)\n' - '\n' - '* … and so on for the corresponding pattern/element.\n' - '\n' - '\n' - 'Mapping Patterns\n' - '~~~~~~~~~~~~~~~~\n' - '\n' - 'A mapping pattern contains one or more key-value patterns. The ' - 'syntax\n' - 'is similar to the construction of a dictionary. Syntax:\n' - '\n' - ' mapping_pattern ::= "{" [items_pattern] "}"\n' - ' items_pattern ::= ",".key_value_pattern+ ","?\n' - ' key_value_pattern ::= (literal_pattern | value_pattern) ":" ' - 'pattern\n' - ' | double_star_pattern\n' - ' double_star_pattern ::= "**" capture_pattern\n' - '\n' - 'At most one double star pattern may be in a mapping pattern. ' - 'The\n' - 'double star pattern must be the last subpattern in the mapping\n' - 'pattern.\n' - '\n' - 'Duplicate keys in mapping patterns are disallowed. Duplicate ' - 'literal\n' - 'keys will raise a "SyntaxError". Two keys that otherwise have ' - 'the same\n' - 'value will raise a "ValueError" at runtime.\n' - '\n' - 'The following is the logical flow for matching a mapping ' - 'pattern\n' - 'against a subject value:\n' - '\n' - '1. If the subject value is not a mapping [3],the mapping ' - 'pattern\n' - ' fails.\n' - '\n' - '2. If every key given in the mapping pattern is present in the ' - 'subject\n' - ' mapping, and the pattern for each key matches the ' - 'corresponding\n' - ' item of the subject mapping, the mapping pattern succeeds.\n' - '\n' - '3. If duplicate keys are detected in the mapping pattern, the ' - 'pattern\n' - ' is considered invalid. A "SyntaxError" is raised for ' - 'duplicate\n' - ' literal values; or a "ValueError" for named keys of the same ' - 'value.\n' - '\n' - 'Note:\n' - '\n' - ' Key-value pairs are matched using the two-argument form of ' - 'the\n' - ' mapping subject’s "get()" method. Matched key-value pairs ' - 'must\n' - ' already be present in the mapping, and not created on-the-fly ' - 'via\n' - ' "__missing__()" or "__getitem__()".\n' - '\n' - 'In simple terms "{KEY1: P1, KEY2: P2, ... }" matches only if all ' - 'the\n' - 'following happens:\n' - '\n' - '* check "" is a mapping\n' - '\n' - '* "KEY1 in "\n' - '\n' - '* "P1" matches "[KEY1]"\n' - '\n' - '* … and so on for the corresponding KEY/pattern pair.\n' - '\n' - '\n' - 'Class Patterns\n' - '~~~~~~~~~~~~~~\n' - '\n' - 'A class pattern represents a class and its positional and ' - 'keyword\n' - 'arguments (if any). Syntax:\n' - '\n' - ' class_pattern ::= name_or_attr "(" [pattern_arguments ' - '","?] ")"\n' - ' pattern_arguments ::= positional_patterns ["," ' - 'keyword_patterns]\n' - ' | keyword_patterns\n' - ' positional_patterns ::= ",".pattern+\n' - ' keyword_patterns ::= ",".keyword_pattern+\n' - ' keyword_pattern ::= NAME "=" pattern\n' - '\n' - 'The same keyword should not be repeated in class patterns.\n' - '\n' - 'The following is the logical flow for matching a class pattern ' - 'against\n' - 'a subject value:\n' - '\n' - '1. If "name_or_attr" is not an instance of the builtin "type" , ' - 'raise\n' - ' "TypeError".\n' - '\n' - '2. 
If the subject value is not an instance of "name_or_attr" ' - '(tested\n' - ' via "isinstance()"), the class pattern fails.\n' - '\n' - '3. If no pattern arguments are present, the pattern succeeds.\n' - ' Otherwise, the subsequent steps depend on whether keyword or\n' - ' positional argument patterns are present.\n' - '\n' - ' For a number of built-in types (specified below), a single\n' - ' positional subpattern is accepted which will match the ' - 'entire\n' - ' subject; for these types keyword patterns also work as for ' - 'other\n' - ' types.\n' - '\n' - ' If only keyword patterns are present, they are processed as\n' - ' follows, one by one:\n' - '\n' - ' I. The keyword is looked up as an attribute on the subject.\n' - '\n' - ' * If this raises an exception other than "AttributeError", ' - 'the\n' - ' exception bubbles up.\n' - '\n' - ' * If this raises "AttributeError", the class pattern has ' - 'failed.\n' - '\n' - ' * Else, the subpattern associated with the keyword pattern ' - 'is\n' - ' matched against the subject’s attribute value. If this ' - 'fails,\n' - ' the class pattern fails; if this succeeds, the match ' - 'proceeds\n' - ' to the next keyword.\n' - '\n' - ' II. If all keyword patterns succeed, the class pattern ' - 'succeeds.\n' - '\n' - ' If any positional patterns are present, they are converted ' - 'to\n' - ' keyword patterns using the "__match_args__" attribute on the ' - 'class\n' - ' "name_or_attr" before matching:\n' - '\n' - ' I. The equivalent of "getattr(cls, "__match_args__", ())" is\n' - ' called.\n' - '\n' - ' * If this raises an exception, the exception bubbles up.\n' - '\n' - ' * If the returned value is not a tuple, the conversion ' - 'fails and\n' - ' "TypeError" is raised.\n' - '\n' - ' * If there are more positional patterns than\n' - ' "len(cls.__match_args__)", "TypeError" is raised.\n' - '\n' - ' * Otherwise, positional pattern "i" is converted to a ' - 'keyword\n' - ' pattern using "__match_args__[i]" as the keyword.\n' - ' "__match_args__[i]" must be a string; if not "TypeError" ' - 'is\n' - ' raised.\n' - '\n' - ' * If there are duplicate keywords, "TypeError" is raised.\n' - '\n' - ' See also:\n' - '\n' - ' Customizing positional arguments in class pattern ' - 'matching\n' - '\n' - ' II. 
Once all positional patterns have been converted to ' - 'keyword\n' - ' patterns,\n' - ' the match proceeds as if there were only keyword ' - 'patterns.\n' - '\n' - ' For the following built-in types the handling of positional\n' - ' subpatterns is different:\n' - '\n' - ' * "bool"\n' - '\n' - ' * "bytearray"\n' - '\n' - ' * "bytes"\n' - '\n' - ' * "dict"\n' - '\n' - ' * "float"\n' - '\n' - ' * "frozenset"\n' - '\n' - ' * "int"\n' - '\n' - ' * "list"\n' - '\n' - ' * "set"\n' - '\n' - ' * "str"\n' - '\n' - ' * "tuple"\n' - '\n' - ' These classes accept a single positional argument, and the ' - 'pattern\n' - ' there is matched against the whole object rather than an ' - 'attribute.\n' - ' For example "int(0|1)" matches the value "0", but not the ' - 'value\n' - ' "0.0".\n' - '\n' - 'In simple terms "CLS(P1, attr=P2)" matches only if the ' - 'following\n' - 'happens:\n' - '\n' - '* "isinstance(, CLS)"\n' - '\n' - '* convert "P1" to a keyword pattern using "CLS.__match_args__"\n' - '\n' - '* For each keyword argument "attr=P2":\n' - '\n' - ' * "hasattr(, "attr")"\n' - '\n' - ' * "P2" matches ".attr"\n' - '\n' - '* … and so on for the corresponding keyword argument/pattern ' - 'pair.\n' - '\n' - 'See also:\n' - '\n' - ' * **PEP 634** – Structural Pattern Matching: Specification\n' - '\n' - ' * **PEP 636** – Structural Pattern Matching: Tutorial\n' - '\n' - '\n' - 'Function definitions\n' - '====================\n' - '\n' - 'A function definition defines a user-defined function object ' - '(see\n' - 'section The standard type hierarchy):\n' - '\n' - ' funcdef ::= [decorators] "def" funcname ' - '[type_params] "(" [parameter_list] ")"\n' - ' ["->" expression] ":" suite\n' - ' decorators ::= decorator+\n' - ' decorator ::= "@" assignment_expression ' - 'NEWLINE\n' - ' parameter_list ::= defparameter ("," ' - 'defparameter)* "," "/" ["," [parameter_list_no_posonly]]\n' - ' | parameter_list_no_posonly\n' - ' parameter_list_no_posonly ::= defparameter ("," ' - 'defparameter)* ["," [parameter_list_starargs]]\n' - ' | parameter_list_starargs\n' - ' parameter_list_starargs ::= "*" [star_parameter] ("," ' - 'defparameter)* ["," ["**" parameter [","]]]\n' - ' | "**" parameter [","]\n' - ' parameter ::= identifier [":" expression]\n' - ' star_parameter ::= identifier [":" ["*"] ' - 'expression]\n' - ' defparameter ::= parameter ["=" expression]\n' - ' funcname ::= identifier\n' - '\n' - 'A function definition is an executable statement. Its execution ' - 'binds\n' - 'the function name in the current local namespace to a function ' - 'object\n' - '(a wrapper around the executable code for the function). This\n' - 'function object contains a reference to the current global ' - 'namespace\n' - 'as the global namespace to be used when the function is called.\n' - '\n' - 'The function definition does not execute the function body; this ' - 'gets\n' - 'executed only when the function is called. [4]\n' - '\n' - 'A function definition may be wrapped by one or more *decorator*\n' - 'expressions. Decorator expressions are evaluated when the ' - 'function is\n' - 'defined, in the scope that contains the function definition. ' - 'The\n' - 'result must be a callable, which is invoked with the function ' - 'object\n' - 'as the only argument. The returned value is bound to the ' - 'function name\n' - 'instead of the function object. Multiple decorators are applied ' - 'in\n' - 'nested fashion. 
For example, the following code\n' - '\n' - ' @f1(arg)\n' - ' @f2\n' - ' def func(): pass\n' - '\n' - 'is roughly equivalent to\n' - '\n' - ' def func(): pass\n' - ' func = f1(arg)(f2(func))\n' - '\n' - 'except that the original function is not temporarily bound to ' - 'the name\n' - '"func".\n' - '\n' - 'Changed in version 3.9: Functions may be decorated with any ' - 'valid\n' - '"assignment_expression". Previously, the grammar was much more\n' - 'restrictive; see **PEP 614** for details.\n' - '\n' - 'A list of type parameters may be given in square brackets ' - 'between the\n' - 'function’s name and the opening parenthesis for its parameter ' - 'list.\n' - 'This indicates to static type checkers that the function is ' - 'generic.\n' - 'At runtime, the type parameters can be retrieved from the ' - 'function’s\n' - '"__type_params__" attribute. See Generic functions for more.\n' - '\n' - 'Changed in version 3.12: Type parameter lists are new in Python ' - '3.12.\n' - '\n' - 'When one or more *parameters* have the form *parameter* "="\n' - '*expression*, the function is said to have “default parameter ' - 'values.â€\n' - 'For a parameter with a default value, the corresponding ' - '*argument* may\n' - 'be omitted from a call, in which case the parameter’s default ' - 'value is\n' - 'substituted. If a parameter has a default value, all following\n' - 'parameters up until the “"*"†must also have a default value — ' - 'this is\n' - 'a syntactic restriction that is not expressed by the grammar.\n' - '\n' - '**Default parameter values are evaluated from left to right when ' - 'the\n' - 'function definition is executed.** This means that the ' - 'expression is\n' - 'evaluated once, when the function is defined, and that the same ' - '“pre-\n' - 'computed†value is used for each call. This is especially ' - 'important\n' - 'to understand when a default parameter value is a mutable ' - 'object, such\n' - 'as a list or a dictionary: if the function modifies the object ' - '(e.g.\n' - 'by appending an item to a list), the default parameter value is ' - 'in\n' - 'effect modified. This is generally not what was intended. A ' - 'way\n' - 'around this is to use "None" as the default, and explicitly test ' - 'for\n' - 'it in the body of the function, e.g.:\n' - '\n' - ' def whats_on_the_telly(penguin=None):\n' - ' if penguin is None:\n' - ' penguin = []\n' - ' penguin.append("property of the zoo")\n' - ' return penguin\n' - '\n' - 'Function call semantics are described in more detail in section ' - 'Calls.\n' - 'A function call always assigns values to all parameters ' - 'mentioned in\n' - 'the parameter list, either from positional arguments, from ' - 'keyword\n' - 'arguments, or from default values. If the form “"*identifier"†' - 'is\n' - 'present, it is initialized to a tuple receiving any excess ' - 'positional\n' - 'parameters, defaulting to the empty tuple. If the form\n' - '“"**identifier"†is present, it is initialized to a new ordered\n' - 'mapping receiving any excess keyword arguments, defaulting to a ' - 'new\n' - 'empty mapping of the same type. Parameters after “"*"†or\n' - '“"*identifier"†are keyword-only parameters and may only be ' - 'passed by\n' - 'keyword arguments. Parameters before “"/"†are positional-only\n' - 'parameters and may only be passed by positional arguments.\n' - '\n' - 'Changed in version 3.8: The "/" function parameter syntax may be ' - 'used\n' - 'to indicate positional-only parameters. 
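A small illustrative definition combining the parameter kinds described above (the names are arbitrary):

    def f(pos_only, /, normal, *args, kw_only, **kwargs):
        # pos_only can only be passed positionally; kw_only only by keyword.
        return pos_only, normal, args, kw_only, kwargs

    f(1, 2, 3, 4, kw_only=5, extra=6)
    # -> (1, 2, (3, 4), 5, {'extra': 6})
    # f(pos_only=1, normal=2, kw_only=3) raises TypeError: pos_only is positional-only.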
See **PEP 570** for ' - 'details.\n' - '\n' - 'Parameters may have an *annotation* of the form “": ' - 'expression"â€\n' - 'following the parameter name. Any parameter may have an ' - 'annotation,\n' - 'even those of the form "*identifier" or "**identifier". (As a ' - 'special\n' - 'case, parameters of the form "*identifier" may have an ' - 'annotation “":\n' - '*expression"â€.) Functions may have “return†annotation of the ' - 'form\n' - '“"-> expression"†after the parameter list. These annotations ' - 'can be\n' - 'any valid Python expression. The presence of annotations does ' - 'not\n' - 'change the semantics of a function. See Annotations for more\n' - 'information on annotations.\n' - '\n' - 'Changed in version 3.11: Parameters of the form “"*identifier"†' - 'may\n' - 'have an annotation “": *expression"â€. See **PEP 646**.\n' - '\n' - 'It is also possible to create anonymous functions (functions not ' - 'bound\n' - 'to a name), for immediate use in expressions. This uses lambda\n' - 'expressions, described in section Lambdas. Note that the ' - 'lambda\n' - 'expression is merely a shorthand for a simplified function ' - 'definition;\n' - 'a function defined in a “"def"†statement can be passed around ' - 'or\n' - 'assigned to another name just like a function defined by a ' - 'lambda\n' - 'expression. The “"def"†form is actually more powerful since ' - 'it\n' - 'allows the execution of multiple statements and annotations.\n' - '\n' - '**Programmer’s note:** Functions are first-class objects. A ' - '“"def"â€\n' - 'statement executed inside a function definition defines a local\n' - 'function that can be returned or passed around. Free variables ' - 'used\n' - 'in the nested function can access the local variables of the ' - 'function\n' - 'containing the def. See section Naming and binding for ' - 'details.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 3107** - Function Annotations\n' - ' The original specification for function annotations.\n' - '\n' - ' **PEP 484** - Type Hints\n' - ' Definition of a standard meaning for annotations: type ' - 'hints.\n' - '\n' - ' **PEP 526** - Syntax for Variable Annotations\n' - ' Ability to type hint variable declarations, including ' - 'class\n' - ' variables and instance variables.\n' - '\n' - ' **PEP 563** - Postponed Evaluation of Annotations\n' - ' Support for forward references within annotations by ' - 'preserving\n' - ' annotations in a string form at runtime instead of eager\n' - ' evaluation.\n' - '\n' - ' **PEP 318** - Decorators for Functions and Methods\n' - ' Function and method decorators were introduced. Class ' - 'decorators\n' - ' were introduced in **PEP 3129**.\n' - '\n' - '\n' - 'Class definitions\n' - '=================\n' - '\n' - 'A class definition defines a class object (see section The ' - 'standard\n' - 'type hierarchy):\n' - '\n' - ' classdef ::= [decorators] "class" classname [type_params] ' - '[inheritance] ":" suite\n' - ' inheritance ::= "(" [argument_list] ")"\n' - ' classname ::= identifier\n' - '\n' - 'A class definition is an executable statement. The inheritance ' - 'list\n' - 'usually gives a list of base classes (see Metaclasses for more\n' - 'advanced uses), so each item in the list should evaluate to a ' - 'class\n' - 'object which allows subclassing. 
Classes without an inheritance ' - 'list\n' - 'inherit, by default, from the base class "object"; hence,\n' - '\n' - ' class Foo:\n' - ' pass\n' - '\n' - 'is equivalent to\n' - '\n' - ' class Foo(object):\n' - ' pass\n' - '\n' - 'The class’s suite is then executed in a new execution frame ' - '(see\n' - 'Naming and binding), using a newly created local namespace and ' - 'the\n' - 'original global namespace. (Usually, the suite contains mostly\n' - 'function definitions.) When the class’s suite finishes ' - 'execution, its\n' - 'execution frame is discarded but its local namespace is saved. ' - '[5] A\n' - 'class object is then created using the inheritance list for the ' - 'base\n' - 'classes and the saved local namespace for the attribute ' - 'dictionary.\n' - 'The class name is bound to this class object in the original ' - 'local\n' - 'namespace.\n' - '\n' - 'The order in which attributes are defined in the class body is\n' - 'preserved in the new class’s "__dict__". Note that this is ' - 'reliable\n' - 'only right after the class is created and only for classes that ' - 'were\n' - 'defined using the definition syntax.\n' - '\n' - 'Class creation can be customized heavily using metaclasses.\n' - '\n' - 'Classes can also be decorated: just like when decorating ' - 'functions,\n' - '\n' - ' @f1(arg)\n' - ' @f2\n' - ' class Foo: pass\n' - '\n' - 'is roughly equivalent to\n' - '\n' - ' class Foo: pass\n' - ' Foo = f1(arg)(f2(Foo))\n' - '\n' - 'The evaluation rules for the decorator expressions are the same ' - 'as for\n' - 'function decorators. The result is then bound to the class ' - 'name.\n' - '\n' - 'Changed in version 3.9: Classes may be decorated with any valid\n' - '"assignment_expression". Previously, the grammar was much more\n' - 'restrictive; see **PEP 614** for details.\n' - '\n' - 'A list of type parameters may be given in square brackets ' - 'immediately\n' - 'after the class’s name. This indicates to static type checkers ' - 'that\n' - 'the class is generic. At runtime, the type parameters can be ' - 'retrieved\n' - 'from the class’s "__type_params__" attribute. See Generic ' - 'classes for\n' - 'more.\n' - '\n' - 'Changed in version 3.12: Type parameter lists are new in Python ' - '3.12.\n' - '\n' - '**Programmer’s note:** Variables defined in the class definition ' - 'are\n' - 'class attributes; they are shared by instances. Instance ' - 'attributes\n' - 'can be set in a method with "self.name = value". Both class ' - 'and\n' - 'instance attributes are accessible through the notation ' - '“"self.name"â€,\n' - 'and an instance attribute hides a class attribute with the same ' - 'name\n' - 'when accessed in this way. Class attributes can be used as ' - 'defaults\n' - 'for instance attributes, but using mutable values there can lead ' - 'to\n' - 'unexpected results. Descriptors can be used to create instance\n' - 'variables with different implementation details.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 3115** - Metaclasses in Python 3000\n' - ' The proposal that changed the declaration of metaclasses to ' - 'the\n' - ' current syntax, and the semantics for how classes with\n' - ' metaclasses are constructed.\n' - '\n' - ' **PEP 3129** - Class Decorators\n' - ' The proposal that added class decorators. 
Function and ' - 'method\n' - ' decorators were introduced in **PEP 318**.\n' - '\n' - '\n' - 'Coroutines\n' - '==========\n' - '\n' - 'Added in version 3.5.\n' - '\n' - '\n' - 'Coroutine function definition\n' - '-----------------------------\n' - '\n' - ' async_funcdef ::= [decorators] "async" "def" funcname "(" ' - '[parameter_list] ")"\n' - ' ["->" expression] ":" suite\n' - '\n' - 'Execution of Python coroutines can be suspended and resumed at ' - 'many\n' - 'points (see *coroutine*). "await" expressions, "async for" and ' - '"async\n' - 'with" can only be used in the body of a coroutine function.\n' - '\n' - 'Functions defined with "async def" syntax are always coroutine\n' - 'functions, even if they do not contain "await" or "async" ' - 'keywords.\n' - '\n' - 'It is a "SyntaxError" to use a "yield from" expression inside ' - 'the body\n' - 'of a coroutine function.\n' - '\n' - 'An example of a coroutine function:\n' - '\n' - ' async def func(param1, param2):\n' - ' do_stuff()\n' - ' await some_coroutine()\n' - '\n' - 'Changed in version 3.7: "await" and "async" are now keywords;\n' - 'previously they were only treated as such inside the body of a\n' - 'coroutine function.\n' - '\n' - '\n' - 'The "async for" statement\n' - '-------------------------\n' - '\n' - ' async_for_stmt ::= "async" for_stmt\n' - '\n' - 'An *asynchronous iterable* provides an "__aiter__" method that\n' - 'directly returns an *asynchronous iterator*, which can call\n' - 'asynchronous code in its "__anext__" method.\n' - '\n' - 'The "async for" statement allows convenient iteration over\n' - 'asynchronous iterables.\n' - '\n' - 'The following code:\n' - '\n' - ' async for TARGET in ITER:\n' - ' SUITE\n' - ' else:\n' - ' SUITE2\n' - '\n' - 'Is semantically equivalent to:\n' - '\n' - ' iter = (ITER)\n' - ' iter = type(iter).__aiter__(iter)\n' - ' running = True\n' - '\n' - ' while running:\n' - ' try:\n' - ' TARGET = await type(iter).__anext__(iter)\n' - ' except StopAsyncIteration:\n' - ' running = False\n' - ' else:\n' - ' SUITE\n' - ' else:\n' - ' SUITE2\n' - '\n' - 'See also "__aiter__()" and "__anext__()" for details.\n' - '\n' - 'It is a "SyntaxError" to use an "async for" statement outside ' - 'the body\n' - 'of a coroutine function.\n' - '\n' - '\n' - 'The "async with" statement\n' - '--------------------------\n' - '\n' - ' async_with_stmt ::= "async" with_stmt\n' - '\n' - 'An *asynchronous context manager* is a *context manager* that is ' - 'able\n' - 'to suspend execution in its *enter* and *exit* methods.\n' - '\n' - 'The following code:\n' - '\n' - ' async with EXPRESSION as TARGET:\n' - ' SUITE\n' - '\n' - 'is semantically equivalent to:\n' - '\n' - ' manager = (EXPRESSION)\n' - ' aenter = type(manager).__aenter__\n' - ' aexit = type(manager).__aexit__\n' - ' value = await aenter(manager)\n' - ' hit_except = False\n' - '\n' - ' try:\n' - ' TARGET = value\n' - ' SUITE\n' - ' except:\n' - ' hit_except = True\n' - ' if not await aexit(manager, *sys.exc_info()):\n' - ' raise\n' - ' finally:\n' - ' if not hit_except:\n' - ' await aexit(manager, None, None, None)\n' - '\n' - 'See also "__aenter__()" and "__aexit__()" for details.\n' - '\n' - 'It is a "SyntaxError" to use an "async with" statement outside ' - 'the\n' - 'body of a coroutine function.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 492** - Coroutines with async and await syntax\n' - ' The proposal that made coroutines a proper standalone ' - 'concept in\n' - ' Python, and added supporting syntax.\n' - '\n' - '\n' - 'Type parameter lists\n' - 
'====================\n' - '\n' - 'Added in version 3.12.\n' - '\n' - 'Changed in version 3.13: Support for default values was added ' - '(see\n' - '**PEP 696**).\n' - '\n' - ' type_params ::= "[" type_param ("," type_param)* "]"\n' - ' type_param ::= typevar | typevartuple | paramspec\n' - ' typevar ::= identifier (":" expression)? ("=" ' - 'expression)?\n' - ' typevartuple ::= "*" identifier ("=" expression)?\n' - ' paramspec ::= "**" identifier ("=" expression)?\n' - '\n' - 'Functions (including coroutines), classes and type aliases may ' - 'contain\n' - 'a type parameter list:\n' - '\n' - ' def max[T](args: list[T]) -> T:\n' - ' ...\n' - '\n' - ' async def amax[T](args: list[T]) -> T:\n' - ' ...\n' - '\n' - ' class Bag[T]:\n' - ' def __iter__(self) -> Iterator[T]:\n' - ' ...\n' - '\n' - ' def add(self, arg: T) -> None:\n' - ' ...\n' - '\n' - ' type ListOrSet[T] = list[T] | set[T]\n' - '\n' - 'Semantically, this indicates that the function, class, or type ' - 'alias\n' - 'is generic over a type variable. This information is primarily ' - 'used by\n' - 'static type checkers, and at runtime, generic objects behave ' - 'much like\n' - 'their non-generic counterparts.\n' - '\n' - 'Type parameters are declared in square brackets ("[]") ' - 'immediately\n' - 'after the name of the function, class, or type alias. The type\n' - 'parameters are accessible within the scope of the generic ' - 'object, but\n' - 'not elsewhere. Thus, after a declaration "def func[T](): pass", ' - 'the\n' - 'name "T" is not available in the module scope. Below, the ' - 'semantics of\n' - 'generic objects are described with more precision. The scope of ' - 'type\n' - 'parameters is modeled with a special function (technically, an\n' - 'annotation scope) that wraps the creation of the generic ' - 'object.\n' - '\n' - 'Generic functions, classes, and type aliases have a ' - '"__type_params__"\n' - 'attribute listing their type parameters.\n' - '\n' - 'Type parameters come in three kinds:\n' - '\n' - '* "typing.TypeVar", introduced by a plain name (e.g., "T").\n' - ' Semantically, this represents a single type to a type ' - 'checker.\n' - '\n' - '* "typing.TypeVarTuple", introduced by a name prefixed with a ' - 'single\n' - ' asterisk (e.g., "*Ts"). Semantically, this stands for a tuple ' - 'of any\n' - ' number of types.\n' - '\n' - '* "typing.ParamSpec", introduced by a name prefixed with two ' - 'asterisks\n' - ' (e.g., "**P"). Semantically, this stands for the parameters of ' - 'a\n' - ' callable.\n' - '\n' - '"typing.TypeVar" declarations can define *bounds* and ' - '*constraints*\n' - 'with a colon (":") followed by an expression. A single ' - 'expression\n' - 'after the colon indicates a bound (e.g. "T: int"). Semantically, ' - 'this\n' - 'means that the "typing.TypeVar" can only represent types that ' - 'are a\n' - 'subtype of this bound. A parenthesized tuple of expressions ' - 'after the\n' - 'colon indicates a set of constraints (e.g. "T: (str, bytes)"). ' - 'Each\n' - 'member of the tuple should be a type (again, this is not ' - 'enforced at\n' - 'runtime). Constrained type variables can only take on one of the ' - 'types\n' - 'in the list of constraints.\n' - '\n' - 'For "typing.TypeVar"s declared using the type parameter list ' - 'syntax,\n' - 'the bound and constraints are not evaluated when the generic ' - 'object is\n' - 'created, but only when the value is explicitly accessed through ' - 'the\n' - 'attributes "__bound__" and "__constraints__". 
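A short sketch of this lazy evaluation (the function is illustrative):

    def first[T: (int, float)](items: list[T]) -> T:
        return items[0]

    T = first.__type_params__[0]   # the typing.TypeVar created for this function
    print(T.__constraints__)       # evaluated only on access -> (<class 'int'>, <class 'float'>)
    print(T.__bound__)             # None: a constrained TypeVar has no bound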
To accomplish ' - 'this, the\n' - 'bounds or constraints are evaluated in a separate annotation ' - 'scope.\n' - '\n' - '"typing.TypeVarTuple"s and "typing.ParamSpec"s cannot have ' - 'bounds or\n' - 'constraints.\n' - '\n' - 'All three flavors of type parameters can also have a *default ' - 'value*,\n' - 'which is used when the type parameter is not explicitly ' - 'provided. This\n' - 'is added by appending a single equals sign ("=") followed by an\n' - 'expression. Like the bounds and constraints of type variables, ' - 'the\n' - 'default value is not evaluated when the object is created, but ' - 'only\n' - 'when the type parameter’s "__default__" attribute is accessed. ' - 'To this\n' - 'end, the default value is evaluated in a separate annotation ' - 'scope. If\n' - 'no default value is specified for a type parameter, the ' - '"__default__"\n' - 'attribute is set to the special sentinel object ' - '"typing.NoDefault".\n' - '\n' - 'The following example indicates the full set of allowed type ' - 'parameter\n' - 'declarations:\n' - '\n' - ' def overly_generic[\n' - ' SimpleTypeVar,\n' - ' TypeVarWithDefault = int,\n' - ' TypeVarWithBound: int,\n' - ' TypeVarWithConstraints: (str, bytes),\n' - ' *SimpleTypeVarTuple = (int, float),\n' - ' **SimpleParamSpec = (str, bytearray),\n' - ' ](\n' - ' a: SimpleTypeVar,\n' - ' b: TypeVarWithDefault,\n' - ' c: TypeVarWithBound,\n' - ' d: Callable[SimpleParamSpec, TypeVarWithConstraints],\n' - ' *e: SimpleTypeVarTuple,\n' - ' ): ...\n' - '\n' - '\n' - 'Generic functions\n' - '-----------------\n' - '\n' - 'Generic functions are declared as follows:\n' - '\n' - ' def func[T](arg: T): ...\n' - '\n' - 'This syntax is equivalent to:\n' - '\n' - ' annotation-def TYPE_PARAMS_OF_func():\n' - ' T = typing.TypeVar("T")\n' - ' def func(arg: T): ...\n' - ' func.__type_params__ = (T,)\n' - ' return func\n' - ' func = TYPE_PARAMS_OF_func()\n' - '\n' - 'Here "annotation-def" indicates an annotation scope, which is ' - 'not\n' - 'actually bound to any name at runtime. 
(One other liberty is ' - 'taken in\n' - 'the translation: the syntax does not go through attribute access ' - 'on\n' - 'the "typing" module, but creates an instance of ' - '"typing.TypeVar"\n' - 'directly.)\n' - '\n' - 'The annotations of generic functions are evaluated within the\n' - 'annotation scope used for declaring the type parameters, but ' - 'the\n' - 'function’s defaults and decorators are not.\n' - '\n' - 'The following example illustrates the scoping rules for these ' - 'cases,\n' - 'as well as for additional flavors of type parameters:\n' - '\n' - ' @decorator\n' - ' def func[T: int, *Ts, **P](*args: *Ts, arg: Callable[P, T] = ' - 'some_default):\n' - ' ...\n' - '\n' - 'Except for the lazy evaluation of the "TypeVar" bound, this is\n' - 'equivalent to:\n' - '\n' - ' DEFAULT_OF_arg = some_default\n' - '\n' - ' annotation-def TYPE_PARAMS_OF_func():\n' - '\n' - ' annotation-def BOUND_OF_T():\n' - ' return int\n' - ' # In reality, BOUND_OF_T() is evaluated only on demand.\n' - ' T = typing.TypeVar("T", bound=BOUND_OF_T())\n' - '\n' - ' Ts = typing.TypeVarTuple("Ts")\n' - ' P = typing.ParamSpec("P")\n' - '\n' - ' def func(*args: *Ts, arg: Callable[P, T] = ' - 'DEFAULT_OF_arg):\n' - ' ...\n' - '\n' - ' func.__type_params__ = (T, Ts, P)\n' - ' return func\n' - ' func = decorator(TYPE_PARAMS_OF_func())\n' - '\n' - 'The capitalized names like "DEFAULT_OF_arg" are not actually ' - 'bound at\n' - 'runtime.\n' - '\n' - '\n' - 'Generic classes\n' - '---------------\n' - '\n' - 'Generic classes are declared as follows:\n' - '\n' - ' class Bag[T]: ...\n' - '\n' - 'This syntax is equivalent to:\n' - '\n' - ' annotation-def TYPE_PARAMS_OF_Bag():\n' - ' T = typing.TypeVar("T")\n' - ' class Bag(typing.Generic[T]):\n' - ' __type_params__ = (T,)\n' - ' ...\n' - ' return Bag\n' - ' Bag = TYPE_PARAMS_OF_Bag()\n' - '\n' - 'Here again "annotation-def" (not a real keyword) indicates an\n' - 'annotation scope, and the name "TYPE_PARAMS_OF_Bag" is not ' - 'actually\n' - 'bound at runtime.\n' - '\n' - 'Generic classes implicitly inherit from "typing.Generic". The ' - 'base\n' - 'classes and keyword arguments of generic classes are evaluated ' - 'within\n' - 'the type scope for the type parameters, and decorators are ' - 'evaluated\n' - 'outside that scope. This is illustrated by this example:\n' - '\n' - ' @decorator\n' - ' class Bag(Base[T], arg=T): ...\n' - '\n' - 'This is equivalent to:\n' - '\n' - ' annotation-def TYPE_PARAMS_OF_Bag():\n' - ' T = typing.TypeVar("T")\n' - ' class Bag(Base[T], typing.Generic[T], arg=T):\n' - ' __type_params__ = (T,)\n' - ' ...\n' - ' return Bag\n' - ' Bag = decorator(TYPE_PARAMS_OF_Bag())\n' - '\n' - '\n' - 'Generic type aliases\n' - '--------------------\n' - '\n' - 'The "type" statement can also be used to create a generic type ' - 'alias:\n' - '\n' - ' type ListOrSet[T] = list[T] | set[T]\n' - '\n' - 'Except for the lazy evaluation of the value, this is equivalent ' - 'to:\n' - '\n' - ' annotation-def TYPE_PARAMS_OF_ListOrSet():\n' - ' T = typing.TypeVar("T")\n' - '\n' - ' annotation-def VALUE_OF_ListOrSet():\n' - ' return list[T] | set[T]\n' - ' # In reality, the value is lazily evaluated\n' - ' return typing.TypeAliasType("ListOrSet", ' - 'VALUE_OF_ListOrSet(), type_params=(T,))\n' - ' ListOrSet = TYPE_PARAMS_OF_ListOrSet()\n' - '\n' - 'Here, "annotation-def" (not a real keyword) indicates an ' - 'annotation\n' - 'scope. 
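The lazy evaluation of the alias value can be observed at runtime (illustrative):

    type ListOrSet[T] = list[T] | set[T]

    print(type(ListOrSet))            # <class 'typing.TypeAliasType'>
    print(ListOrSet.__type_params__)  # (T,)
    print(ListOrSet.__value__)        # evaluated only on access -> list[T] | set[T]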
The capitalized names like "TYPE_PARAMS_OF_ListOrSet" are ' - 'not\n' - 'actually bound at runtime.\n' - '\n' - '\n' - 'Annotations\n' - '===========\n' - '\n' - 'Changed in version 3.14: Annotations are now lazily evaluated ' - 'by\n' - 'default.\n' - '\n' - 'Variables and function parameters may carry *annotations*, ' - 'created by\n' - 'adding a colon after the name, followed by an expression:\n' - '\n' - ' x: annotation = 1\n' - ' def f(param: annotation): ...\n' - '\n' - 'Functions may also carry a return annotation following an ' - 'arrow:\n' - '\n' - ' def f() -> annotation: ...\n' - '\n' - 'Annotations are conventionally used for *type hints*, but this ' - 'is not\n' - 'enforced by the language, and in general annotations may ' - 'contain\n' - 'arbitrary expressions. The presence of annotations does not ' - 'change the\n' - 'runtime semantics of the code, except if some mechanism is used ' - 'that\n' - 'introspects and uses the annotations (such as "dataclasses" or\n' - '"functools.singledispatch()").\n' - '\n' - 'By default, annotations are lazily evaluated in a annotation ' - 'scope.\n' - 'This means that they are not evaluated when the code containing ' - 'the\n' - 'annotation is evaluated. Instead, the interpreter saves ' - 'information\n' - 'that can be used to evaluate the annotation later if requested. ' - 'The\n' - '"annotationlib" module provides tools for evaluating ' - 'annotations.\n' - '\n' - 'If the future statement "from __future__ import annotations" is\n' - 'present, all annotations are instead stored as strings:\n' - '\n' - ' >>> from __future__ import annotations\n' - ' >>> def f(param: annotation): ...\n' - ' >>> f.__annotations__\n' - " {'param': 'annotation'}\n" - '\n' - '-[ Footnotes ]-\n' - '\n' - '[1] The exception is propagated to the invocation stack unless ' - 'there\n' - ' is a "finally" clause which happens to raise another ' - 'exception.\n' - ' That new exception causes the old one to be lost.\n' - '\n' - '[2] In pattern matching, a sequence is defined as one of the\n' - ' following:\n' - '\n' - ' * a class that inherits from "collections.abc.Sequence"\n' - '\n' - ' * a Python class that has been registered as\n' - ' "collections.abc.Sequence"\n' - '\n' - ' * a builtin class that has its (CPython) ' - '"Py_TPFLAGS_SEQUENCE" bit\n' - ' set\n' - '\n' - ' * a class that inherits from any of the above\n' - '\n' - ' The following standard library classes are sequences:\n' - '\n' - ' * "array.array"\n' - '\n' - ' * "collections.deque"\n' - '\n' - ' * "list"\n' - '\n' - ' * "memoryview"\n' - '\n' - ' * "range"\n' - '\n' - ' * "tuple"\n' - '\n' - ' Note:\n' - '\n' - ' Subject values of type "str", "bytes", and "bytearray" do ' - 'not\n' - ' match sequence patterns.\n' - '\n' - '[3] In pattern matching, a mapping is defined as one of the ' - 'following:\n' - '\n' - ' * a class that inherits from "collections.abc.Mapping"\n' - '\n' - ' * a Python class that has been registered as\n' - ' "collections.abc.Mapping"\n' - '\n' - ' * a builtin class that has its (CPython) ' - '"Py_TPFLAGS_MAPPING" bit\n' - ' set\n' - '\n' - ' * a class that inherits from any of the above\n' - '\n' - ' The standard library classes "dict" and ' - '"types.MappingProxyType"\n' - ' are mappings.\n' - '\n' - '[4] A string literal appearing as the first statement in the ' - 'function\n' - ' body is transformed into the function’s "__doc__" attribute ' - 'and\n' - ' therefore the function’s *docstring*.\n' - '\n' - '[5] A string literal appearing as the first statement in the ' - 'class\n' 
- ' body is transformed into the namespace’s "__doc__" item and\n' - ' therefore the class’s *docstring*.\n', - 'context-managers': 'With Statement Context Managers\n' - '*******************************\n' - '\n' - 'A *context manager* is an object that defines the ' - 'runtime context to\n' - 'be established when executing a "with" statement. The ' - 'context manager\n' - 'handles the entry into, and the exit from, the desired ' - 'runtime context\n' - 'for the execution of the block of code. Context ' - 'managers are normally\n' - 'invoked using the "with" statement (described in section ' - 'The with\n' - 'statement), but can also be used by directly invoking ' - 'their methods.\n' - '\n' - 'Typical uses of context managers include saving and ' - 'restoring various\n' - 'kinds of global state, locking and unlocking resources, ' - 'closing opened\n' - 'files, etc.\n' - '\n' - 'For more information on context managers, see Context ' - 'Manager Types.\n' - 'The "object" class itself does not provide the context ' - 'manager\n' - 'methods.\n' - '\n' - 'object.__enter__(self)\n' - '\n' - ' Enter the runtime context related to this object. The ' - '"with"\n' - ' statement will bind this method’s return value to the ' - 'target(s)\n' - ' specified in the "as" clause of the statement, if ' - 'any.\n' - '\n' - 'object.__exit__(self, exc_type, exc_value, traceback)\n' - '\n' - ' Exit the runtime context related to this object. The ' - 'parameters\n' - ' describe the exception that caused the context to be ' - 'exited. If the\n' - ' context was exited without an exception, all three ' - 'arguments will\n' - ' be "None".\n' - '\n' - ' If an exception is supplied, and the method wishes to ' - 'suppress the\n' - ' exception (i.e., prevent it from being propagated), ' - 'it should\n' - ' return a true value. Otherwise, the exception will be ' - 'processed\n' - ' normally upon exit from this method.\n' - '\n' - ' Note that "__exit__()" methods should not reraise the ' - 'passed-in\n' - ' exception; this is the caller’s responsibility.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 343** - The “with†statement\n' - ' The specification, background, and examples for the ' - 'Python "with"\n' - ' statement.\n', - 'continue': 'The "continue" statement\n' - '************************\n' - '\n' - ' continue_stmt ::= "continue"\n' - '\n' - '"continue" may only occur syntactically nested in a "for" or ' - '"while"\n' - 'loop, but not nested in a function or class definition within ' - 'that\n' - 'loop. It continues with the next cycle of the nearest enclosing ' - 'loop.\n' - '\n' - 'When "continue" passes control out of a "try" statement with a\n' - '"finally" clause, that "finally" clause is executed before ' - 'really\n' - 'starting the next loop cycle.\n', - 'conversions': 'Arithmetic conversions\n' - '**********************\n' - '\n' - 'When a description of an arithmetic operator below uses the ' - 'phrase\n' - '“the numeric arguments are converted to a common real typeâ€, ' - 'this\n' - 'means that the operator implementation for built-in types ' - 'works as\n' - 'follows:\n' - '\n' - '* If both arguments are complex numbers, no conversion is ' - 'performed;\n' - '\n' - '* if either argument is a complex or a floating-point number, ' - 'the\n' - ' other is converted to a floating-point number;\n' - '\n' - '* otherwise, both must be integers and no conversion is ' - 'necessary.\n' - '\n' - 'Some additional rules apply for certain operators (e.g., a ' - 'string as a\n' - 'left argument to the ‘%’ operator). 
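A few interpreter lines illustrating these rules:

    >>> 1 + 2           # both integers: no conversion
    3
    >>> 1 + 2.0         # the int is converted to a float
    3.0
    >>> 2.0 + 3j        # mixing float and complex
    (2+3j)
    >>> "%d items" % 4  # additional rule: str as the left argument of '%'
    '4 items'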
Extensions must define ' - 'their own\n' - 'conversion behavior.\n', - 'customization': 'Basic customization\n' - '*******************\n' - '\n' - 'object.__new__(cls[, ...])\n' - '\n' - ' Called to create a new instance of class *cls*. ' - '"__new__()" is a\n' - ' static method (special-cased so you need not declare it ' - 'as such)\n' - ' that takes the class of which an instance was requested ' - 'as its\n' - ' first argument. The remaining arguments are those ' - 'passed to the\n' - ' object constructor expression (the call to the class). ' - 'The return\n' - ' value of "__new__()" should be the new object instance ' - '(usually an\n' - ' instance of *cls*).\n' - '\n' - ' Typical implementations create a new instance of the ' - 'class by\n' - ' invoking the superclass’s "__new__()" method using\n' - ' "super().__new__(cls[, ...])" with appropriate arguments ' - 'and then\n' - ' modifying the newly created instance as necessary before ' - 'returning\n' - ' it.\n' - '\n' - ' If "__new__()" is invoked during object construction and ' - 'it returns\n' - ' an instance of *cls*, then the new instance’s ' - '"__init__()" method\n' - ' will be invoked like "__init__(self[, ...])", where ' - '*self* is the\n' - ' new instance and the remaining arguments are the same as ' - 'were\n' - ' passed to the object constructor.\n' - '\n' - ' If "__new__()" does not return an instance of *cls*, ' - 'then the new\n' - ' instance’s "__init__()" method will not be invoked.\n' - '\n' - ' "__new__()" is intended mainly to allow subclasses of ' - 'immutable\n' - ' types (like int, str, or tuple) to customize instance ' - 'creation. It\n' - ' is also commonly overridden in custom metaclasses in ' - 'order to\n' - ' customize class creation.\n' - '\n' - 'object.__init__(self[, ...])\n' - '\n' - ' Called after the instance has been created (by ' - '"__new__()"), but\n' - ' before it is returned to the caller. The arguments are ' - 'those\n' - ' passed to the class constructor expression. If a base ' - 'class has an\n' - ' "__init__()" method, the derived class’s "__init__()" ' - 'method, if\n' - ' any, must explicitly call it to ensure proper ' - 'initialization of the\n' - ' base class part of the instance; for example:\n' - ' "super().__init__([args...])".\n' - '\n' - ' Because "__new__()" and "__init__()" work together in ' - 'constructing\n' - ' objects ("__new__()" to create it, and "__init__()" to ' - 'customize\n' - ' it), no non-"None" value may be returned by ' - '"__init__()"; doing so\n' - ' will cause a "TypeError" to be raised at runtime.\n' - '\n' - 'object.__del__(self)\n' - '\n' - ' Called when the instance is about to be destroyed. This ' - 'is also\n' - ' called a finalizer or (improperly) a destructor. If a ' - 'base class\n' - ' has a "__del__()" method, the derived class’s ' - '"__del__()" method,\n' - ' if any, must explicitly call it to ensure proper ' - 'deletion of the\n' - ' base class part of the instance.\n' - '\n' - ' It is possible (though not recommended!) for the ' - '"__del__()" method\n' - ' to postpone destruction of the instance by creating a ' - 'new reference\n' - ' to it. This is called object *resurrection*. 
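A typical illustration of "__new__()" customizing a subclass of an immutable type (the class is arbitrary):

    class Inch(float):
        """A float constructed from a value given in inches, stored in metres."""
        def __new__(cls, value=0.0):
            # float is immutable, so the conversion must happen in __new__();
            # by the time __init__() runs the value can no longer be changed.
            return super().__new__(cls, value * 0.0254)

    print(Inch(100))   # 2.54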
It is\n' - ' implementation-dependent whether "__del__()" is called a ' - 'second\n' - ' time when a resurrected object is about to be destroyed; ' - 'the\n' - ' current *CPython* implementation only calls it once.\n' - '\n' - ' It is not guaranteed that "__del__()" methods are called ' - 'for\n' - ' objects that still exist when the interpreter exits.\n' - ' "weakref.finalize" provides a straightforward way to ' - 'register a\n' - ' cleanup function to be called when an object is garbage ' - 'collected.\n' - '\n' - ' Note:\n' - '\n' - ' "del x" doesn’t directly call "x.__del__()" — the ' - 'former\n' - ' decrements the reference count for "x" by one, and the ' - 'latter is\n' - ' only called when "x"’s reference count reaches zero.\n' - '\n' - ' **CPython implementation detail:** It is possible for a ' - 'reference\n' - ' cycle to prevent the reference count of an object from ' - 'going to\n' - ' zero. In this case, the cycle will be later detected ' - 'and deleted\n' - ' by the *cyclic garbage collector*. A common cause of ' - 'reference\n' - ' cycles is when an exception has been caught in a local ' - 'variable.\n' - ' The frame’s locals then reference the exception, which ' - 'references\n' - ' its own traceback, which references the locals of all ' - 'frames caught\n' - ' in the traceback.\n' - '\n' - ' See also: Documentation for the "gc" module.\n' - '\n' - ' Warning:\n' - '\n' - ' Due to the precarious circumstances under which ' - '"__del__()"\n' - ' methods are invoked, exceptions that occur during ' - 'their execution\n' - ' are ignored, and a warning is printed to "sys.stderr" ' - 'instead.\n' - ' In particular:\n' - '\n' - ' * "__del__()" can be invoked when arbitrary code is ' - 'being\n' - ' executed, including from any arbitrary thread. If ' - '"__del__()"\n' - ' needs to take a lock or invoke any other blocking ' - 'resource, it\n' - ' may deadlock as the resource may already be taken by ' - 'the code\n' - ' that gets interrupted to execute "__del__()".\n' - '\n' - ' * "__del__()" can be executed during interpreter ' - 'shutdown. As a\n' - ' consequence, the global variables it needs to access ' - '(including\n' - ' other modules) may already have been deleted or set ' - 'to "None".\n' - ' Python guarantees that globals whose name begins ' - 'with a single\n' - ' underscore are deleted from their module before ' - 'other globals\n' - ' are deleted; if no other references to such globals ' - 'exist, this\n' - ' may help in assuring that imported modules are still ' - 'available\n' - ' at the time when the "__del__()" method is called.\n' - '\n' - 'object.__repr__(self)\n' - '\n' - ' Called by the "repr()" built-in function to compute the ' - '“officialâ€\n' - ' string representation of an object. If at all possible, ' - 'this\n' - ' should look like a valid Python expression that could be ' - 'used to\n' - ' recreate an object with the same value (given an ' - 'appropriate\n' - ' environment). If this is not possible, a string of the ' - 'form\n' - ' "<...some useful description...>" should be returned. ' - 'The return\n' - ' value must be a string object. If a class defines ' - '"__repr__()" but\n' - ' not "__str__()", then "__repr__()" is also used when an ' - '“informalâ€\n' - ' string representation of instances of that class is ' - 'required.\n' - '\n' - ' This is typically used for debugging, so it is important ' - 'that the\n' - ' representation is information-rich and unambiguous. 
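For instance (an illustrative class):

    class Point:
        def __init__(self, x, y):
            self.x, self.y = x, y

        def __repr__(self):
            # Unambiguous, and looks like the expression that recreates the object.
            return f"Point(x={self.x!r}, y={self.y!r})"

        def __str__(self):
            # A more readable, "informal" form.
            return f"({self.x}, {self.y})"

    p = Point(1, 2)
    print(repr(p))   # Point(x=1, y=2)
    print(p)         # (1, 2) -- print() uses __str__()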
A ' - 'default\n' - ' implementation is provided by the "object" class ' - 'itself.\n' - '\n' - 'object.__str__(self)\n' - '\n' - ' Called by "str(object)", the default "__format__()" ' - 'implementation,\n' - ' and the built-in function "print()", to compute the ' - '“informal†or\n' - ' nicely printable string representation of an object. ' - 'The return\n' - ' value must be a str object.\n' - '\n' - ' This method differs from "object.__repr__()" in that ' - 'there is no\n' - ' expectation that "__str__()" return a valid Python ' - 'expression: a\n' - ' more convenient or concise representation can be used.\n' - '\n' - ' The default implementation defined by the built-in type ' - '"object"\n' - ' calls "object.__repr__()".\n' - '\n' - 'object.__bytes__(self)\n' - '\n' - ' Called by bytes to compute a byte-string representation ' - 'of an\n' - ' object. This should return a "bytes" object. The ' - '"object" class\n' - ' itself does not provide this method.\n' - '\n' - 'object.__format__(self, format_spec)\n' - '\n' - ' Called by the "format()" built-in function, and by ' - 'extension,\n' - ' evaluation of formatted string literals and the ' - '"str.format()"\n' - ' method, to produce a “formatted†string representation ' - 'of an\n' - ' object. The *format_spec* argument is a string that ' - 'contains a\n' - ' description of the formatting options desired. The ' - 'interpretation\n' - ' of the *format_spec* argument is up to the type ' - 'implementing\n' - ' "__format__()", however most classes will either ' - 'delegate\n' - ' formatting to one of the built-in types, or use a ' - 'similar\n' - ' formatting option syntax.\n' - '\n' - ' See Format Specification Mini-Language for a description ' - 'of the\n' - ' standard formatting syntax.\n' - '\n' - ' The return value must be a string object.\n' - '\n' - ' The default implementation by the "object" class should ' - 'be given an\n' - ' empty *format_spec* string. It delegates to ' - '"__str__()".\n' - '\n' - ' Changed in version 3.4: The __format__ method of ' - '"object" itself\n' - ' raises a "TypeError" if passed any non-empty string.\n' - '\n' - ' Changed in version 3.7: "object.__format__(x, \'\')" is ' - 'now\n' - ' equivalent to "str(x)" rather than "format(str(x), ' - '\'\')".\n' - '\n' - 'object.__lt__(self, other)\n' - 'object.__le__(self, other)\n' - 'object.__eq__(self, other)\n' - 'object.__ne__(self, other)\n' - 'object.__gt__(self, other)\n' - 'object.__ge__(self, other)\n' - '\n' - ' These are the so-called “rich comparison†methods. The\n' - ' correspondence between operator symbols and method names ' - 'is as\n' - ' follows: "xy" calls\n' - ' "x.__gt__(y)", and "x>=y" calls "x.__ge__(y)".\n' - '\n' - ' A rich comparison method may return the singleton ' - '"NotImplemented"\n' - ' if it does not implement the operation for a given pair ' - 'of\n' - ' arguments. By convention, "False" and "True" are ' - 'returned for a\n' - ' successful comparison. However, these methods can return ' - 'any value,\n' - ' so if the comparison operator is used in a Boolean ' - 'context (e.g.,\n' - ' in the condition of an "if" statement), Python will call ' - '"bool()"\n' - ' on the value to determine if the result is true or ' - 'false.\n' - '\n' - ' By default, "object" implements "__eq__()" by using ' - '"is", returning\n' - ' "NotImplemented" in the case of a false comparison: ' - '"True if x is y\n' - ' else NotImplemented". 
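A sketch of the "NotImplemented" convention in practice (the class is illustrative):

    class Money:
        def __init__(self, amount):
            self.amount = amount

        def __eq__(self, other):
            if not isinstance(other, Money):
                return NotImplemented   # let Python try the reflected operation
            return self.amount == other.amount

        def __lt__(self, other):
            if not isinstance(other, Money):
                return NotImplemented
            return self.amount < other.amount

    print(Money(3) == Money(3))   # True
    print(Money(3) < Money(5))    # True
    print(Money(3) == 3)          # False: both sides returned NotImplemented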
For "__ne__()", by default it ' - 'delegates to\n' - ' "__eq__()" and inverts the result unless it is ' - '"NotImplemented".\n' - ' There are no other implied relationships among the ' - 'comparison\n' - ' operators or default implementations; for example, the ' - 'truth of\n' - ' "(x.__hash__".\n' - '\n' - ' If a class that does not override "__eq__()" wishes to ' - 'suppress\n' - ' hash support, it should include "__hash__ = None" in the ' - 'class\n' - ' definition. A class which defines its own "__hash__()" ' - 'that\n' - ' explicitly raises a "TypeError" would be incorrectly ' - 'identified as\n' - ' hashable by an "isinstance(obj, ' - 'collections.abc.Hashable)" call.\n' - '\n' - ' Note:\n' - '\n' - ' By default, the "__hash__()" values of str and bytes ' - 'objects are\n' - ' “salted†with an unpredictable random value. Although ' - 'they\n' - ' remain constant within an individual Python process, ' - 'they are not\n' - ' predictable between repeated invocations of ' - 'Python.This is\n' - ' intended to provide protection against a ' - 'denial-of-service caused\n' - ' by carefully chosen inputs that exploit the worst ' - 'case\n' - ' performance of a dict insertion, *O*(*n*^2) ' - 'complexity. See\n' - ' http://ocert.org/advisories/ocert-2011-003.html for\n' - ' details.Changing hash values affects the iteration ' - 'order of sets.\n' - ' Python has never made guarantees about this ordering ' - '(and it\n' - ' typically varies between 32-bit and 64-bit builds).See ' - 'also\n' - ' "PYTHONHASHSEED".\n' - '\n' - ' Changed in version 3.3: Hash randomization is enabled by ' - 'default.\n' - '\n' - 'object.__bool__(self)\n' - '\n' - ' Called to implement truth value testing and the built-in ' - 'operation\n' - ' "bool()"; should return "False" or "True". When this ' - 'method is not\n' - ' defined, "__len__()" is called, if it is defined, and ' - 'the object is\n' - ' considered true if its result is nonzero. If a class ' - 'defines\n' - ' neither "__len__()" nor "__bool__()" (which is true of ' - 'the "object"\n' - ' class itself), all its instances are considered true.\n', - 'debugger': '"pdb" — The Python Debugger\n' - '***************************\n' - '\n' - '**Source code:** Lib/pdb.py\n' - '\n' - '======================================================================\n' - '\n' - 'The module "pdb" defines an interactive source code debugger ' - 'for\n' - 'Python programs. It supports setting (conditional) breakpoints ' - 'and\n' - 'single stepping at the source line level, inspection of stack ' - 'frames,\n' - 'source code listing, and evaluation of arbitrary Python code in ' - 'the\n' - 'context of any stack frame. It also supports post-mortem ' - 'debugging\n' - 'and can be called under program control.\n' - '\n' - 'The debugger is extensible – it is actually defined as the ' - 'class\n' - '"Pdb". This is currently undocumented but easily understood by ' - 'reading\n' - 'the source. 
The extension interface uses the modules "bdb" and ' - '"cmd".\n' - '\n' - 'See also:\n' - '\n' - ' Module "faulthandler"\n' - ' Used to dump Python tracebacks explicitly, on a fault, ' - 'after a\n' - ' timeout, or on a user signal.\n' - '\n' - ' Module "traceback"\n' - ' Standard interface to extract, format and print stack ' - 'traces of\n' - ' Python programs.\n' - '\n' - 'The typical usage to break into the debugger is to insert:\n' - '\n' - ' import pdb; pdb.set_trace()\n' - '\n' - 'Or:\n' - '\n' - ' breakpoint()\n' - '\n' - 'at the location you want to break into the debugger, and then ' - 'run the\n' - 'program. You can then step through the code following this ' - 'statement,\n' - 'and continue running without the debugger using the "continue"\n' - 'command.\n' - '\n' - 'Changed in version 3.7: The built-in "breakpoint()", when called ' - 'with\n' - 'defaults, can be used instead of "import pdb; pdb.set_trace()".\n' - '\n' - ' def double(x):\n' - ' breakpoint()\n' - ' return x * 2\n' - ' val = 3\n' - ' print(f"{val} * 2 is {double(val)}")\n' - '\n' - 'The debugger’s prompt is "(Pdb)", which is the indicator that ' - 'you are\n' - 'in debug mode:\n' - '\n' - ' > ...(2)double()\n' - ' -> breakpoint()\n' - ' (Pdb) p x\n' - ' 3\n' - ' (Pdb) continue\n' - ' 3 * 2 is 6\n' - '\n' - 'Changed in version 3.3: Tab-completion via the "readline" module ' - 'is\n' - 'available for commands and command arguments, e.g. the current ' - 'global\n' - 'and local names are offered as arguments of the "p" command.\n' - '\n' - 'You can also invoke "pdb" from the command line to debug other\n' - 'scripts. For example:\n' - '\n' - ' python -m pdb myscript.py\n' - '\n' - 'When invoked as a module, pdb will automatically enter ' - 'post-mortem\n' - 'debugging if the program being debugged exits abnormally. After ' - 'post-\n' - 'mortem debugging (or after normal exit of the program), pdb ' - 'will\n' - 'restart the program. Automatic restarting preserves pdb’s state ' - '(such\n' - 'as breakpoints) and in most cases is more useful than quitting ' - 'the\n' - 'debugger upon program’s exit.\n' - '\n' - 'Changed in version 3.2: Added the "-c" option to execute ' - 'commands as\n' - 'if given in a ".pdbrc" file; see Debugger Commands.\n' - '\n' - 'Changed in version 3.7: Added the "-m" option to execute ' - 'modules\n' - 'similar to the way "python -m" does. As with a script, the ' - 'debugger\n' - 'will pause execution just before the first line of the module.\n' - '\n' - 'Typical usage to execute a statement under control of the ' - 'debugger is:\n' - '\n' - ' >>> import pdb\n' - ' >>> def f(x):\n' - ' ... print(1 / x)\n' - ' >>> pdb.run("f(2)")\n' - ' > (1)()\n' - ' (Pdb) continue\n' - ' 0.5\n' - ' >>>\n' - '\n' - 'The typical usage to inspect a crashed program is:\n' - '\n' - ' >>> import pdb\n' - ' >>> def f(x):\n' - ' ... 
print(1 / x)\n' - ' ...\n' - ' >>> f(0)\n' - ' Traceback (most recent call last):\n' - ' File "", line 1, in \n' - ' File "", line 2, in f\n' - ' ZeroDivisionError: division by zero\n' - ' >>> pdb.pm()\n' - ' > (2)f()\n' - ' (Pdb) p x\n' - ' 0\n' - ' (Pdb)\n' - '\n' - 'Changed in version 3.13: The implementation of **PEP 667** means ' - 'that\n' - 'name assignments made via "pdb" will immediately affect the ' - 'active\n' - 'scope, even when running inside an *optimized scope*.\n' - '\n' - 'The module defines the following functions; each enters the ' - 'debugger\n' - 'in a slightly different way:\n' - '\n' - 'pdb.run(statement, globals=None, locals=None)\n' - '\n' - ' Execute the *statement* (given as a string or a code object) ' - 'under\n' - ' debugger control. The debugger prompt appears before any ' - 'code is\n' - ' executed; you can set breakpoints and type "continue", or you ' - 'can\n' - ' step through the statement using "step" or "next" (all these\n' - ' commands are explained below). The optional *globals* and ' - '*locals*\n' - ' arguments specify the environment in which the code is ' - 'executed; by\n' - ' default the dictionary of the module "__main__" is used. ' - '(See the\n' - ' explanation of the built-in "exec()" or "eval()" functions.)\n' - '\n' - 'pdb.runeval(expression, globals=None, locals=None)\n' - '\n' - ' Evaluate the *expression* (given as a string or a code ' - 'object)\n' - ' under debugger control. When "runeval()" returns, it returns ' - 'the\n' - ' value of the *expression*. Otherwise this function is ' - 'similar to\n' - ' "run()".\n' - '\n' - 'pdb.runcall(function, *args, **kwds)\n' - '\n' - ' Call the *function* (a function or method object, not a ' - 'string)\n' - ' with the given arguments. When "runcall()" returns, it ' - 'returns\n' - ' whatever the function call returned. The debugger prompt ' - 'appears\n' - ' as soon as the function is entered.\n' - '\n' - 'pdb.set_trace(*, header=None, commands=None)\n' - '\n' - ' Enter the debugger at the calling stack frame. This is ' - 'useful to\n' - ' hard-code a breakpoint at a given point in a program, even if ' - 'the\n' - ' code is not otherwise being debugged (e.g. when an assertion\n' - ' fails). If given, *header* is printed to the console just ' - 'before\n' - ' debugging begins. The *commands* argument, if given, is a ' - 'list of\n' - ' commands to execute when the debugger starts.\n' - '\n' - ' Changed in version 3.7: The keyword-only argument *header*.\n' - '\n' - ' Changed in version 3.13: "set_trace()" will enter the ' - 'debugger\n' - ' immediately, rather than on the next line of code to be ' - 'executed.\n' - '\n' - ' Added in version 3.14: The *commands* argument.\n' - '\n' - 'pdb.post_mortem(traceback=None)\n' - '\n' - ' Enter post-mortem debugging of the given *traceback* object. ' - 'If no\n' - ' *traceback* is given, it uses the one of the exception that ' - 'is\n' - ' currently being handled (an exception must be being handled ' - 'if the\n' - ' default is to be used).\n' - '\n' - 'pdb.pm()\n' - '\n' - ' Enter post-mortem debugging of the exception found in\n' - ' "sys.last_exc".\n' - '\n' - 'The "run*" functions and "set_trace()" are aliases for ' - 'instantiating\n' - 'the "Pdb" class and calling the method of the same name. 
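For example, a hypothetical way to enter the debugger at the entry of a call using "runcall()":

    import pdb

    def area(w, h):
        return w * h

    # The prompt appears as soon as area() is entered; after "continue",
    # runcall() returns whatever the call returned (here, 12).
    result = pdb.runcall(area, 3, 4)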
If you ' - 'want\n' - 'to access further features, you have to do this yourself:\n' - '\n' - "class pdb.Pdb(completekey='tab', stdin=None, stdout=None, " - 'skip=None, nosigint=False, readrc=True, mode=None)\n' - '\n' - ' "Pdb" is the debugger class.\n' - '\n' - ' The *completekey*, *stdin* and *stdout* arguments are passed ' - 'to the\n' - ' underlying "cmd.Cmd" class; see the description there.\n' - '\n' - ' The *skip* argument, if given, must be an iterable of ' - 'glob-style\n' - ' module name patterns. The debugger will not step into frames ' - 'that\n' - ' originate in a module that matches one of these patterns. ' - '[1]\n' - '\n' - ' By default, Pdb sets a handler for the SIGINT signal (which ' - 'is sent\n' - ' when the user presses "Ctrl-C" on the console) when you give ' - 'a\n' - ' "continue" command. This allows you to break into the ' - 'debugger\n' - ' again by pressing "Ctrl-C". If you want Pdb not to touch ' - 'the\n' - ' SIGINT handler, set *nosigint* to true.\n' - '\n' - ' The *readrc* argument defaults to true and controls whether ' - 'Pdb\n' - ' will load .pdbrc files from the filesystem.\n' - '\n' - ' The *mode* argument specifies how the debugger was invoked. ' - 'It\n' - ' impacts the workings of some debugger commands. Valid values ' - 'are\n' - ' "\'inline\'" (used by the breakpoint() builtin), "\'cli\'" ' - '(used by the\n' - ' command line invocation) or "None" (for backwards compatible\n' - ' behaviour, as before the *mode* argument was added).\n' - '\n' - ' Example call to enable tracing with *skip*:\n' - '\n' - " import pdb; pdb.Pdb(skip=['django.*']).set_trace()\n" - '\n' - ' Raises an auditing event "pdb.Pdb" with no arguments.\n' - '\n' - ' Changed in version 3.1: Added the *skip* parameter.\n' - '\n' - ' Changed in version 3.2: Added the *nosigint* parameter. ' - 'Previously,\n' - ' a SIGINT handler was never set by Pdb.\n' - '\n' - ' Changed in version 3.6: The *readrc* argument.\n' - '\n' - ' Added in version 3.14: Added the *mode* argument.\n' - '\n' - ' run(statement, globals=None, locals=None)\n' - ' runeval(expression, globals=None, locals=None)\n' - ' runcall(function, *args, **kwds)\n' - ' set_trace()\n' - '\n' - ' See the documentation for the functions explained above.\n' - '\n' - '\n' - 'Debugger Commands\n' - '=================\n' - '\n' - 'The commands recognized by the debugger are listed below. Most\n' - 'commands can be abbreviated to one or two letters as indicated; ' - 'e.g.\n' - '"h(elp)" means that either "h" or "help" can be used to enter ' - 'the help\n' - 'command (but not "he" or "hel", nor "H" or "Help" or "HELP").\n' - 'Arguments to commands must be separated by whitespace (spaces ' - 'or\n' - 'tabs). Optional arguments are enclosed in square brackets ' - '("[]") in\n' - 'the command syntax; the square brackets must not be typed.\n' - 'Alternatives in the command syntax are separated by a vertical ' - 'bar\n' - '("|").\n' - '\n' - 'Entering a blank line repeats the last command entered. ' - 'Exception: if\n' - 'the last command was a "list" command, the next 11 lines are ' - 'listed.\n' - '\n' - 'Commands that the debugger doesn’t recognize are assumed to be ' - 'Python\n' - 'statements and are executed in the context of the program being\n' - 'debugged. Python statements can also be prefixed with an ' - 'exclamation\n' - 'point ("!"). 
This is a powerful way to inspect the program ' - 'being\n' - 'debugged; it is even possible to change a variable or call a ' - 'function.\n' - 'When an exception occurs in such a statement, the exception name ' - 'is\n' - 'printed but the debugger’s state is not changed.\n' - '\n' - 'Changed in version 3.13: Expressions/Statements whose prefix is ' - 'a pdb\n' - 'command are now correctly identified and executed.\n' - '\n' - 'The debugger supports aliases. Aliases can have parameters ' - 'which\n' - 'allows one a certain level of adaptability to the context under\n' - 'examination.\n' - '\n' - 'Multiple commands may be entered on a single line, separated by ' - '";;".\n' - '(A single ";" is not used as it is the separator for multiple ' - 'commands\n' - 'in a line that is passed to the Python parser.) No intelligence ' - 'is\n' - 'applied to separating the commands; the input is split at the ' - 'first\n' - '";;" pair, even if it is in the middle of a quoted string. A\n' - 'workaround for strings with double semicolons is to use ' - 'implicit\n' - 'string concatenation "\';\'\';\'" or "";"";"".\n' - '\n' - 'To set a temporary global variable, use a *convenience ' - 'variable*. A\n' - '*convenience variable* is a variable whose name starts with ' - '"$". For\n' - 'example, "$foo = 1" sets a global variable "$foo" which you can ' - 'use in\n' - 'the debugger session. The *convenience variables* are cleared ' - 'when\n' - 'the program resumes execution so it’s less likely to interfere ' - 'with\n' - 'your program compared to using normal variables like "foo = 1".\n' - '\n' - 'There are three preset *convenience variables*:\n' - '\n' - '* "$_frame": the current frame you are debugging\n' - '\n' - '* "$_retval": the return value if the frame is returning\n' - '\n' - '* "$_exception": the exception if the frame is raising an ' - 'exception\n' - '\n' - 'Added in version 3.12: Added the *convenience variable* ' - 'feature.\n' - '\n' - 'If a file ".pdbrc" exists in the user’s home directory or in ' - 'the\n' - 'current directory, it is read with "\'utf-8\'" encoding and ' - 'executed as\n' - 'if it had been typed at the debugger prompt, with the exception ' - 'that\n' - 'empty lines and lines starting with "#" are ignored. This is\n' - 'particularly useful for aliases. If both files exist, the one ' - 'in the\n' - 'home directory is read first and aliases defined there can be\n' - 'overridden by the local file.\n' - '\n' - 'Changed in version 3.2: ".pdbrc" can now contain commands that\n' - 'continue debugging, such as "continue" or "next". Previously, ' - 'these\n' - 'commands had no effect.\n' - '\n' - 'Changed in version 3.11: ".pdbrc" is now read with "\'utf-8\'" ' - 'encoding.\n' - 'Previously, it was read with the system locale encoding.\n' - '\n' - 'h(elp) [command]\n' - '\n' - ' Without argument, print the list of available commands. With ' - 'a\n' - ' *command* as argument, print help about that command. "help ' - 'pdb"\n' - ' displays the full documentation (the docstring of the "pdb"\n' - ' module). Since the *command* argument must be an identifier, ' - '"help\n' - ' exec" must be entered to get help on the "!" command.\n' - '\n' - 'w(here) [count]\n' - '\n' - ' Print a stack trace, with the most recent frame at the ' - 'bottom. if\n' - ' *count* is 0, print the current frame entry. If *count* is\n' - ' negative, print the least recent - *count* frames. If *count* ' - 'is\n' - ' positive, print the most recent *count* frames. 
An arrow ' - '(">")\n' - ' indicates the current frame, which determines the context of ' - 'most\n' - ' commands.\n' - '\n' - ' Changed in version 3.14: *count* argument is added.\n' - '\n' - 'd(own) [count]\n' - '\n' - ' Move the current frame *count* (default one) levels down in ' - 'the\n' - ' stack trace (to a newer frame).\n' - '\n' - 'u(p) [count]\n' - '\n' - ' Move the current frame *count* (default one) levels up in the ' - 'stack\n' - ' trace (to an older frame).\n' - '\n' - 'b(reak) [([filename:]lineno | function) [, condition]]\n' - '\n' - ' With a *lineno* argument, set a break at line *lineno* in ' - 'the\n' - ' current file. The line number may be prefixed with a ' - '*filename* and\n' - ' a colon, to specify a breakpoint in another file (possibly ' - 'one that\n' - ' hasn’t been loaded yet). The file is searched on ' - '"sys.path".\n' - ' Acceptable forms of *filename* are "/abspath/to/file.py",\n' - ' "relpath/file.py", "module" and "package.module".\n' - '\n' - ' With a *function* argument, set a break at the first ' - 'executable\n' - ' statement within that function. *function* can be any ' - 'expression\n' - ' that evaluates to a function in the current namespace.\n' - '\n' - ' If a second argument is present, it is an expression which ' - 'must\n' - ' evaluate to true before the breakpoint is honored.\n' - '\n' - ' Without argument, list all breaks, including for each ' - 'breakpoint,\n' - ' the number of times that breakpoint has been hit, the ' - 'current\n' - ' ignore count, and the associated condition if any.\n' - '\n' - ' Each breakpoint is assigned a number to which all the other\n' - ' breakpoint commands refer.\n' - '\n' - 'tbreak [([filename:]lineno | function) [, condition]]\n' - '\n' - ' Temporary breakpoint, which is removed automatically when it ' - 'is\n' - ' first hit. The arguments are the same as for "break".\n' - '\n' - 'cl(ear) [filename:lineno | bpnumber ...]\n' - '\n' - ' With a *filename:lineno* argument, clear all the breakpoints ' - 'at\n' - ' this line. With a space separated list of breakpoint numbers, ' - 'clear\n' - ' those breakpoints. Without argument, clear all breaks (but ' - 'first\n' - ' ask confirmation).\n' - '\n' - 'disable bpnumber [bpnumber ...]\n' - '\n' - ' Disable the breakpoints given as a space separated list of\n' - ' breakpoint numbers. Disabling a breakpoint means it cannot ' - 'cause\n' - ' the program to stop execution, but unlike clearing a ' - 'breakpoint, it\n' - ' remains in the list of breakpoints and can be (re-)enabled.\n' - '\n' - 'enable bpnumber [bpnumber ...]\n' - '\n' - ' Enable the breakpoints specified.\n' - '\n' - 'ignore bpnumber [count]\n' - '\n' - ' Set the ignore count for the given breakpoint number. If ' - '*count*\n' - ' is omitted, the ignore count is set to 0. A breakpoint ' - 'becomes\n' - ' active when the ignore count is zero. When non-zero, the ' - '*count*\n' - ' is decremented each time the breakpoint is reached and the\n' - ' breakpoint is not disabled and any associated condition ' - 'evaluates\n' - ' to true.\n' - '\n' - 'condition bpnumber [condition]\n' - '\n' - ' Set a new *condition* for the breakpoint, an expression which ' - 'must\n' - ' evaluate to true before the breakpoint is honored. If ' - '*condition*\n' - ' is absent, any existing condition is removed; i.e., the ' - 'breakpoint\n' - ' is made unconditional.\n' - '\n' - 'commands [bpnumber]\n' - '\n' - ' Specify a list of commands for breakpoint number *bpnumber*. 
' - 'The\n' - ' commands themselves appear on the following lines. Type a ' - 'line\n' - ' containing just "end" to terminate the commands. An example:\n' - '\n' - ' (Pdb) commands 1\n' - ' (com) p some_variable\n' - ' (com) end\n' - ' (Pdb)\n' - '\n' - ' To remove all commands from a breakpoint, type "commands" ' - 'and\n' - ' follow it immediately with "end"; that is, give no commands.\n' - '\n' - ' With no *bpnumber* argument, "commands" refers to the last\n' - ' breakpoint set.\n' - '\n' - ' You can use breakpoint commands to start your program up ' - 'again.\n' - ' Simply use the "continue" command, or "step", or any other ' - 'command\n' - ' that resumes execution.\n' - '\n' - ' Specifying any command resuming execution (currently ' - '"continue",\n' - ' "step", "next", "return", "until", "jump", "quit" and their\n' - ' abbreviations) terminates the command list (as if that ' - 'command was\n' - ' immediately followed by end). This is because any time you ' - 'resume\n' - ' execution (even with a simple next or step), you may ' - 'encounter\n' - ' another breakpoint—which could have its own command list, ' - 'leading\n' - ' to ambiguities about which list to execute.\n' - '\n' - ' If the list of commands contains the "silent" command, or a ' - 'command\n' - ' that resumes execution, then the breakpoint message ' - 'containing\n' - ' information about the frame is not displayed.\n' - '\n' - ' Changed in version 3.14: Frame information will not be ' - 'displayed if\n' - ' a command that resumes execution is present in the command ' - 'list.\n' - '\n' - 's(tep)\n' - '\n' - ' Execute the current line, stop at the first possible ' - 'occasion\n' - ' (either in a function that is called or on the next line in ' - 'the\n' - ' current function).\n' - '\n' - 'n(ext)\n' - '\n' - ' Continue execution until the next line in the current ' - 'function is\n' - ' reached or it returns. (The difference between "next" and ' - '"step"\n' - ' is that "step" stops inside a called function, while "next"\n' - ' executes called functions at (nearly) full speed, only ' - 'stopping at\n' - ' the next line in the current function.)\n' - '\n' - 'unt(il) [lineno]\n' - '\n' - ' Without argument, continue execution until the line with a ' - 'number\n' - ' greater than the current one is reached.\n' - '\n' - ' With *lineno*, continue execution until a line with a number\n' - ' greater or equal to *lineno* is reached. In both cases, also ' - 'stop\n' - ' when the current frame returns.\n' - '\n' - ' Changed in version 3.2: Allow giving an explicit line ' - 'number.\n' - '\n' - 'r(eturn)\n' - '\n' - ' Continue execution until the current function returns.\n' - '\n' - 'c(ont(inue))\n' - '\n' - ' Continue execution, only stop when a breakpoint is ' - 'encountered.\n' - '\n' - 'j(ump) lineno\n' - '\n' - ' Set the next line that will be executed. Only available in ' - 'the\n' - ' bottom-most frame. This lets you jump back and execute code ' - 'again,\n' - ' or jump forward to skip code that you don’t want to run.\n' - '\n' - ' It should be noted that not all jumps are allowed – for ' - 'instance it\n' - ' is not possible to jump into the middle of a "for" loop or ' - 'out of a\n' - ' "finally" clause.\n' - '\n' - 'l(ist) [first[, last]]\n' - '\n' - ' List source code for the current file. Without arguments, ' - 'list 11\n' - ' lines around the current line or continue the previous ' - 'listing.\n' - ' With "." as argument, list 11 lines around the current line. 
' - 'With\n' - ' one argument, list 11 lines around at that line. With two\n' - ' arguments, list the given range; if the second argument is ' - 'less\n' - ' than the first, it is interpreted as a count.\n' - '\n' - ' The current line in the current frame is indicated by "->". ' - 'If an\n' - ' exception is being debugged, the line where the exception ' - 'was\n' - ' originally raised or propagated is indicated by ">>", if it ' - 'differs\n' - ' from the current line.\n' - '\n' - ' Changed in version 3.2: Added the ">>" marker.\n' - '\n' - 'll | longlist\n' - '\n' - ' List all source code for the current function or frame.\n' - ' Interesting lines are marked as for "list".\n' - '\n' - ' Added in version 3.2.\n' - '\n' - 'a(rgs)\n' - '\n' - ' Print the arguments of the current function and their ' - 'current\n' - ' values.\n' - '\n' - 'p expression\n' - '\n' - ' Evaluate *expression* in the current context and print its ' - 'value.\n' - '\n' - ' Note:\n' - '\n' - ' "print()" can also be used, but is not a debugger command — ' - 'this\n' - ' executes the Python "print()" function.\n' - '\n' - 'pp expression\n' - '\n' - ' Like the "p" command, except the value of *expression* is ' - 'pretty-\n' - ' printed using the "pprint" module.\n' - '\n' - 'whatis expression\n' - '\n' - ' Print the type of *expression*.\n' - '\n' - 'source expression\n' - '\n' - ' Try to get source code of *expression* and display it.\n' - '\n' - ' Added in version 3.2.\n' - '\n' - 'display [expression]\n' - '\n' - ' Display the value of *expression* if it changed, each time\n' - ' execution stops in the current frame.\n' - '\n' - ' Without *expression*, list all display expressions for the ' - 'current\n' - ' frame.\n' - '\n' - ' Note:\n' - '\n' - ' Display evaluates *expression* and compares to the result ' - 'of the\n' - ' previous evaluation of *expression*, so when the result is\n' - ' mutable, display may not be able to pick up the changes.\n' - '\n' - ' Example:\n' - '\n' - ' lst = []\n' - ' breakpoint()\n' - ' pass\n' - ' lst.append(1)\n' - ' print(lst)\n' - '\n' - ' Display won’t realize "lst" has been changed because the ' - 'result of\n' - ' evaluation is modified in place by "lst.append(1)" before ' - 'being\n' - ' compared:\n' - '\n' - ' > example.py(3)()\n' - ' -> pass\n' - ' (Pdb) display lst\n' - ' display lst: []\n' - ' (Pdb) n\n' - ' > example.py(4)()\n' - ' -> lst.append(1)\n' - ' (Pdb) n\n' - ' > example.py(5)()\n' - ' -> print(lst)\n' - ' (Pdb)\n' - '\n' - ' You can do some tricks with copy mechanism to make it work:\n' - '\n' - ' > example.py(3)()\n' - ' -> pass\n' - ' (Pdb) display lst[:]\n' - ' display lst[:]: []\n' - ' (Pdb) n\n' - ' > example.py(4)()\n' - ' -> lst.append(1)\n' - ' (Pdb) n\n' - ' > example.py(5)()\n' - ' -> print(lst)\n' - ' display lst[:]: [1] [old: []]\n' - ' (Pdb)\n' - '\n' - ' Added in version 3.2.\n' - '\n' - 'undisplay [expression]\n' - '\n' - ' Do not display *expression* anymore in the current frame. ' - 'Without\n' - ' *expression*, clear all display expressions for the current ' - 'frame.\n' - '\n' - ' Added in version 3.2.\n' - '\n' - 'interact\n' - '\n' - ' Start an interactive interpreter (using the "code" module) in ' - 'a new\n' - ' global namespace initialised from the local and global ' - 'namespaces\n' - ' for the current scope. 
Use "exit()" or "quit()" to exit the\n' - ' interpreter and return to the debugger.\n' - '\n' - ' Note:\n' - '\n' - ' As "interact" creates a new dedicated namespace for code\n' - ' execution, assignments to variables will not affect the ' - 'original\n' - ' namespaces. However, modifications to any referenced ' - 'mutable\n' - ' objects will be reflected in the original namespaces as ' - 'usual.\n' - '\n' - ' Added in version 3.2.\n' - '\n' - ' Changed in version 3.13: "exit()" and "quit()" can be used to ' - 'exit\n' - ' the "interact" command.\n' - '\n' - ' Changed in version 3.13: "interact" directs its output to ' - 'the\n' - ' debugger’s output channel rather than "sys.stderr".\n' - '\n' - 'alias [name [command]]\n' - '\n' - ' Create an alias called *name* that executes *command*. The\n' - ' *command* must *not* be enclosed in quotes. Replaceable ' - 'parameters\n' - ' can be indicated by "%1", "%2", … and "%9", while "%*" is ' - 'replaced\n' - ' by all the parameters. If *command* is omitted, the current ' - 'alias\n' - ' for *name* is shown. If no arguments are given, all aliases ' - 'are\n' - ' listed.\n' - '\n' - ' Aliases may be nested and can contain anything that can be ' - 'legally\n' - ' typed at the pdb prompt. Note that internal pdb commands ' - '*can* be\n' - ' overridden by aliases. Such a command is then hidden until ' - 'the\n' - ' alias is removed. Aliasing is recursively applied to the ' - 'first\n' - ' word of the command line; all other words in the line are ' - 'left\n' - ' alone.\n' - '\n' - ' As an example, here are two useful aliases (especially when ' - 'placed\n' - ' in the ".pdbrc" file):\n' - '\n' - ' # Print instance variables (usage "pi classInst")\n' - ' alias pi for k in %1.__dict__.keys(): print(f"%1.{k} = ' - '{%1.__dict__[k]}")\n' - ' # Print instance variables in self\n' - ' alias ps pi self\n' - '\n' - 'unalias name\n' - '\n' - ' Delete the specified alias *name*.\n' - '\n' - '! statement\n' - '\n' - ' Execute the (one-line) *statement* in the context of the ' - 'current\n' - ' stack frame. The exclamation point can be omitted unless the ' - 'first\n' - ' word of the statement resembles a debugger command, e.g.:\n' - '\n' - ' (Pdb) ! n=42\n' - ' (Pdb)\n' - '\n' - ' To set a global variable, you can prefix the assignment ' - 'command\n' - ' with a "global" statement on the same line, e.g.:\n' - '\n' - " (Pdb) global list_options; list_options = ['-l']\n" - ' (Pdb)\n' - '\n' - 'run [args ...]\n' - 'restart [args ...]\n' - '\n' - ' Restart the debugged Python program. If *args* is supplied, ' - 'it is\n' - ' split with "shlex" and the result is used as the new ' - '"sys.argv".\n' - ' History, breakpoints, actions and debugger options are ' - 'preserved.\n' - ' "restart" is an alias for "run".\n' - '\n' - ' Changed in version 3.14: "run" and "restart" commands are ' - 'disabled\n' - ' when the debugger is invoked in "\'inline\'" mode.\n' - '\n' - 'q(uit)\n' - '\n' - ' Quit from the debugger. 
The program being executed is ' - 'aborted.\n' - '\n' - 'debug code\n' - '\n' - ' Enter a recursive debugger that steps through *code* (which ' - 'is an\n' - ' arbitrary expression or statement to be executed in the ' - 'current\n' - ' environment).\n' - '\n' - 'retval\n' - '\n' - ' Print the return value for the last return of the current ' - 'function.\n' - '\n' - 'exceptions [excnumber]\n' - '\n' - ' List or jump between chained exceptions.\n' - '\n' - ' When using "pdb.pm()" or "Pdb.post_mortem(...)" with a ' - 'chained\n' - ' exception instead of a traceback, it allows the user to move\n' - ' between the chained exceptions using "exceptions" command to ' - 'list\n' - ' exceptions, and "exception " to switch to that ' - 'exception.\n' - '\n' - ' Example:\n' - '\n' - ' def out():\n' - ' try:\n' - ' middle()\n' - ' except Exception as e:\n' - ' raise ValueError("reraise middle() error") from e\n' - '\n' - ' def middle():\n' - ' try:\n' - ' return inner(0)\n' - ' except Exception as e:\n' - ' raise ValueError("Middle fail")\n' - '\n' - ' def inner(x):\n' - ' 1 / x\n' - '\n' - ' out()\n' - '\n' - ' calling "pdb.pm()" will allow to move between exceptions:\n' - '\n' - ' > example.py(5)out()\n' - ' -> raise ValueError("reraise middle() error") from e\n' - '\n' - ' (Pdb) exceptions\n' - " 0 ZeroDivisionError('division by zero')\n" - " 1 ValueError('Middle fail')\n" - " > 2 ValueError('reraise middle() error')\n" - '\n' - ' (Pdb) exceptions 0\n' - ' > example.py(16)inner()\n' - ' -> 1 / x\n' - '\n' - ' (Pdb) up\n' - ' > example.py(10)middle()\n' - ' -> return inner(0)\n' - '\n' - ' Added in version 3.13.\n' - '\n' - '-[ Footnotes ]-\n' - '\n' - '[1] Whether a frame is considered to originate in a certain ' - 'module is\n' - ' determined by the "__name__" in the frame globals.\n', - 'del': 'The "del" statement\n' - '*******************\n' - '\n' - ' del_stmt ::= "del" target_list\n' - '\n' - 'Deletion is recursively defined very similar to the way assignment ' - 'is\n' - 'defined. Rather than spelling it out in full details, here are some\n' - 'hints.\n' - '\n' - 'Deletion of a target list recursively deletes each target, from left\n' - 'to right.\n' - '\n' - 'Deletion of a name removes the binding of that name from the local ' - 'or\n' - 'global namespace, depending on whether the name occurs in a "global"\n' - 'statement in the same code block. 
If the name is unbound, a\n' - '"NameError" exception will be raised.\n' - '\n' - 'Deletion of attribute references, subscriptions and slicings is ' - 'passed\n' - 'to the primary object involved; deletion of a slicing is in general\n' - 'equivalent to assignment of an empty slice of the right type (but ' - 'even\n' - 'this is determined by the sliced object).\n' - '\n' - 'Changed in version 3.2: Previously it was illegal to delete a name\n' - 'from the local namespace if it occurs as a free variable in a nested\n' - 'block.\n', - 'dict': 'Dictionary displays\n' - '*******************\n' - '\n' - 'A dictionary display is a possibly empty series of dict items\n' - '(key/value pairs) enclosed in curly braces:\n' - '\n' - ' dict_display ::= "{" [dict_item_list | dict_comprehension] ' - '"}"\n' - ' dict_item_list ::= dict_item ("," dict_item)* [","]\n' - ' dict_item ::= expression ":" expression | "**" or_expr\n' - ' dict_comprehension ::= expression ":" expression comp_for\n' - '\n' - 'A dictionary display yields a new dictionary object.\n' - '\n' - 'If a comma-separated sequence of dict items is given, they are\n' - 'evaluated from left to right to define the entries of the ' - 'dictionary:\n' - 'each key object is used as a key into the dictionary to store the\n' - 'corresponding value. This means that you can specify the same key\n' - 'multiple times in the dict item list, and the final dictionary’s ' - 'value\n' - 'for that key will be the last one given.\n' - '\n' - 'A double asterisk "**" denotes *dictionary unpacking*. Its operand\n' - 'must be a *mapping*. Each mapping item is added to the new\n' - 'dictionary. Later values replace values already set by earlier ' - 'dict\n' - 'items and earlier dictionary unpackings.\n' - '\n' - 'Added in version 3.5: Unpacking into dictionary displays, ' - 'originally\n' - 'proposed by **PEP 448**.\n' - '\n' - 'A dict comprehension, in contrast to list and set comprehensions,\n' - 'needs two expressions separated with a colon followed by the usual\n' - '“for†and “if†clauses. When the comprehension is run, the ' - 'resulting\n' - 'key and value elements are inserted in the new dictionary in the ' - 'order\n' - 'they are produced.\n' - '\n' - 'Restrictions on the types of the key values are listed earlier in\n' - 'section The standard type hierarchy. (To summarize, the key type\n' - 'should be *hashable*, which excludes all mutable objects.) Clashes\n' - 'between duplicate keys are not detected; the last value (textually\n' - 'rightmost in the display) stored for a given key value prevails.\n' - '\n' - 'Changed in version 3.8: Prior to Python 3.8, in dict ' - 'comprehensions,\n' - 'the evaluation order of key and value was not well-defined. In\n' - 'CPython, the value was evaluated before the key. Starting with ' - '3.8,\n' - 'the key is evaluated before the value, as proposed by **PEP 572**.\n', - 'dynamic-features': 'Interaction with dynamic features\n' - '*********************************\n' - '\n' - 'Name resolution of free variables occurs at runtime, not ' - 'at compile\n' - 'time. This means that the following code will print 42:\n' - '\n' - ' i = 10\n' - ' def f():\n' - ' print(i)\n' - ' i = 42\n' - ' f()\n' - '\n' - 'The "eval()" and "exec()" functions do not have access ' - 'to the full\n' - 'environment for resolving names. Names may be resolved ' - 'in the local\n' - 'and global namespaces of the caller. Free variables are ' - 'not resolved\n' - 'in the nearest enclosing namespace, but in the global ' - 'namespace. 
[1]\n' - 'The "exec()" and "eval()" functions have optional ' - 'arguments to\n' - 'override the global and local namespace. If only one ' - 'namespace is\n' - 'specified, it is used for both.\n', - 'else': 'The "if" statement\n' - '******************\n' - '\n' - 'The "if" statement is used for conditional execution:\n' - '\n' - ' if_stmt ::= "if" assignment_expression ":" suite\n' - ' ("elif" assignment_expression ":" suite)*\n' - ' ["else" ":" suite]\n' - '\n' - 'It selects exactly one of the suites by evaluating the expressions ' - 'one\n' - 'by one until one is found to be true (see section Boolean ' - 'operations\n' - 'for the definition of true and false); then that suite is executed\n' - '(and no other part of the "if" statement is executed or evaluated).\n' - 'If all expressions are false, the suite of the "else" clause, if\n' - 'present, is executed.\n', - 'exceptions': 'Exceptions\n' - '**********\n' - '\n' - 'Exceptions are a means of breaking out of the normal flow of ' - 'control\n' - 'of a code block in order to handle errors or other ' - 'exceptional\n' - 'conditions. An exception is *raised* at the point where the ' - 'error is\n' - 'detected; it may be *handled* by the surrounding code block or ' - 'by any\n' - 'code block that directly or indirectly invoked the code block ' - 'where\n' - 'the error occurred.\n' - '\n' - 'The Python interpreter raises an exception when it detects a ' - 'run-time\n' - 'error (such as division by zero). A Python program can also\n' - 'explicitly raise an exception with the "raise" statement. ' - 'Exception\n' - 'handlers are specified with the "try" … "except" statement. ' - 'The\n' - '"finally" clause of such a statement can be used to specify ' - 'cleanup\n' - 'code which does not handle the exception, but is executed ' - 'whether an\n' - 'exception occurred or not in the preceding code.\n' - '\n' - 'Python uses the “termination†model of error handling: an ' - 'exception\n' - 'handler can find out what happened and continue execution at ' - 'an outer\n' - 'level, but it cannot repair the cause of the error and retry ' - 'the\n' - 'failing operation (except by re-entering the offending piece ' - 'of code\n' - 'from the top).\n' - '\n' - 'When an exception is not handled at all, the interpreter ' - 'terminates\n' - 'execution of the program, or returns to its interactive main ' - 'loop. In\n' - 'either case, it prints a stack traceback, except when the ' - 'exception is\n' - '"SystemExit".\n' - '\n' - 'Exceptions are identified by class instances. The "except" ' - 'clause is\n' - 'selected depending on the class of the instance: it must ' - 'reference the\n' - 'class of the instance or a *non-virtual base class* thereof. ' - 'The\n' - 'instance can be received by the handler and can carry ' - 'additional\n' - 'information about the exceptional condition.\n' - '\n' - 'Note:\n' - '\n' - ' Exception messages are not part of the Python API. 
Their ' - 'contents\n' - ' may change from one version of Python to the next without ' - 'warning\n' - ' and should not be relied on by code which will run under ' - 'multiple\n' - ' versions of the interpreter.\n' - '\n' - 'See also the description of the "try" statement in section The ' - 'try\n' - 'statement and "raise" statement in section The raise ' - 'statement.\n' - '\n' - '-[ Footnotes ]-\n' - '\n' - '[1] This limitation occurs because the code that is executed ' - 'by these\n' - ' operations is not available at the time the module is ' - 'compiled.\n', - 'execmodel': 'Execution model\n' - '***************\n' - '\n' - '\n' - 'Structure of a program\n' - '======================\n' - '\n' - 'A Python program is constructed from code blocks. A *block* is ' - 'a piece\n' - 'of Python program text that is executed as a unit. The ' - 'following are\n' - 'blocks: a module, a function body, and a class definition. ' - 'Each\n' - 'command typed interactively is a block. A script file (a file ' - 'given\n' - 'as standard input to the interpreter or specified as a command ' - 'line\n' - 'argument to the interpreter) is a code block. A script command ' - '(a\n' - 'command specified on the interpreter command line with the ' - '"-c"\n' - 'option) is a code block. A module run as a top level script (as ' - 'module\n' - '"__main__") from the command line using a "-m" argument is also ' - 'a code\n' - 'block. The string argument passed to the built-in functions ' - '"eval()"\n' - 'and "exec()" is a code block.\n' - '\n' - 'A code block is executed in an *execution frame*. A frame ' - 'contains\n' - 'some administrative information (used for debugging) and ' - 'determines\n' - 'where and how execution continues after the code block’s ' - 'execution has\n' - 'completed.\n' - '\n' - '\n' - 'Naming and binding\n' - '==================\n' - '\n' - '\n' - 'Binding of names\n' - '----------------\n' - '\n' - '*Names* refer to objects. Names are introduced by name ' - 'binding\n' - 'operations.\n' - '\n' - 'The following constructs bind names:\n' - '\n' - '* formal parameters to functions,\n' - '\n' - '* class definitions,\n' - '\n' - '* function definitions,\n' - '\n' - '* assignment expressions,\n' - '\n' - '* targets that are identifiers if occurring in an assignment:\n' - '\n' - ' * "for" loop header,\n' - '\n' - ' * after "as" in a "with" statement, "except" clause, ' - '"except*"\n' - ' clause, or in the as-pattern in structural pattern ' - 'matching,\n' - '\n' - ' * in a capture pattern in structural pattern matching\n' - '\n' - '* "import" statements.\n' - '\n' - '* "type" statements.\n' - '\n' - '* type parameter lists.\n' - '\n' - 'The "import" statement of the form "from ... import *" binds ' - 'all names\n' - 'defined in the imported module, except those beginning with an\n' - 'underscore. This form may only be used at the module level.\n' - '\n' - 'A target occurring in a "del" statement is also considered ' - 'bound for\n' - 'this purpose (though the actual semantics are to unbind the ' - 'name).\n' - '\n' - 'Each assignment or import statement occurs within a block ' - 'defined by a\n' - 'class or function definition or at the module level (the ' - 'top-level\n' - 'code block).\n' - '\n' - 'If a name is bound in a block, it is a local variable of that ' - 'block,\n' - 'unless declared as "nonlocal" or "global". If a name is bound ' - 'at the\n' - 'module level, it is a global variable. (The variables of the ' - 'module\n' - 'code block are local and global.) 
If a variable is used in a ' - 'code\n' - 'block but not defined there, it is a *free variable*.\n' - '\n' - 'Each occurrence of a name in the program text refers to the ' - '*binding*\n' - 'of that name established by the following name resolution ' - 'rules.\n' - '\n' - '\n' - 'Resolution of names\n' - '-------------------\n' - '\n' - 'A *scope* defines the visibility of a name within a block. If ' - 'a local\n' - 'variable is defined in a block, its scope includes that block. ' - 'If the\n' - 'definition occurs in a function block, the scope extends to any ' - 'blocks\n' - 'contained within the defining one, unless a contained block ' - 'introduces\n' - 'a different binding for the name.\n' - '\n' - 'When a name is used in a code block, it is resolved using the ' - 'nearest\n' - 'enclosing scope. The set of all such scopes visible to a code ' - 'block\n' - 'is called the block’s *environment*.\n' - '\n' - 'When a name is not found at all, a "NameError" exception is ' - 'raised. If\n' - 'the current scope is a function scope, and the name refers to a ' - 'local\n' - 'variable that has not yet been bound to a value at the point ' - 'where the\n' - 'name is used, an "UnboundLocalError" exception is raised.\n' - '"UnboundLocalError" is a subclass of "NameError".\n' - '\n' - 'If a name binding operation occurs anywhere within a code ' - 'block, all\n' - 'uses of the name within the block are treated as references to ' - 'the\n' - 'current block. This can lead to errors when a name is used ' - 'within a\n' - 'block before it is bound. This rule is subtle. Python lacks\n' - 'declarations and allows name binding operations to occur ' - 'anywhere\n' - 'within a code block. The local variables of a code block can ' - 'be\n' - 'determined by scanning the entire text of the block for name ' - 'binding\n' - 'operations. See the FAQ entry on UnboundLocalError for ' - 'examples.\n' - '\n' - 'If the "global" statement occurs within a block, all uses of ' - 'the names\n' - 'specified in the statement refer to the bindings of those names ' - 'in the\n' - 'top-level namespace. Names are resolved in the top-level ' - 'namespace by\n' - 'searching the global namespace, i.e. the namespace of the ' - 'module\n' - 'containing the code block, and the builtins namespace, the ' - 'namespace\n' - 'of the module "builtins". The global namespace is searched ' - 'first. If\n' - 'the names are not found there, the builtins namespace is ' - 'searched\n' - 'next. If the names are also not found in the builtins ' - 'namespace, new\n' - 'variables are created in the global namespace. The global ' - 'statement\n' - 'must precede all uses of the listed names.\n' - '\n' - 'The "global" statement has the same scope as a name binding ' - 'operation\n' - 'in the same block. If the nearest enclosing scope for a free ' - 'variable\n' - 'contains a global statement, the free variable is treated as a ' - 'global.\n' - '\n' - 'The "nonlocal" statement causes corresponding names to refer ' - 'to\n' - 'previously bound variables in the nearest enclosing function ' - 'scope.\n' - '"SyntaxError" is raised at compile time if the given name does ' - 'not\n' - 'exist in any enclosing function scope. Type parameters cannot ' - 'be\n' - 'rebound with the "nonlocal" statement.\n' - '\n' - 'The namespace for a module is automatically created the first ' - 'time a\n' - 'module is imported. 
The main module for a script is always ' - 'called\n' - '"__main__".\n' - '\n' - 'Class definition blocks and arguments to "exec()" and "eval()" ' - 'are\n' - 'special in the context of name resolution. A class definition ' - 'is an\n' - 'executable statement that may use and define names. These ' - 'references\n' - 'follow the normal rules for name resolution with an exception ' - 'that\n' - 'unbound local variables are looked up in the global namespace. ' - 'The\n' - 'namespace of the class definition becomes the attribute ' - 'dictionary of\n' - 'the class. The scope of names defined in a class block is ' - 'limited to\n' - 'the class block; it does not extend to the code blocks of ' - 'methods.\n' - 'This includes comprehensions and generator expressions, but it ' - 'does\n' - 'not include annotation scopes, which have access to their ' - 'enclosing\n' - 'class scopes. This means that the following will fail:\n' - '\n' - ' class A:\n' - ' a = 42\n' - ' b = list(a + i for i in range(10))\n' - '\n' - 'However, the following will succeed:\n' - '\n' - ' class A:\n' - ' type Alias = Nested\n' - ' class Nested: pass\n' - '\n' - " print(A.Alias.__value__) # \n" - '\n' - '\n' - 'Annotation scopes\n' - '-----------------\n' - '\n' - '*Annotations*, type parameter lists and "type" statements ' - 'introduce\n' - '*annotation scopes*, which behave mostly like function scopes, ' - 'but\n' - 'with some exceptions discussed below.\n' - '\n' - 'Annotation scopes are used in the following contexts:\n' - '\n' - '* *Function annotations*.\n' - '\n' - '* *Variable annotations*.\n' - '\n' - '* Type parameter lists for generic type aliases.\n' - '\n' - '* Type parameter lists for generic functions. A generic ' - 'function’s\n' - ' annotations are executed within the annotation scope, but ' - 'its\n' - ' defaults and decorators are not.\n' - '\n' - '* Type parameter lists for generic classes. A generic class’s ' - 'base\n' - ' classes and keyword arguments are executed within the ' - 'annotation\n' - ' scope, but its decorators are not.\n' - '\n' - '* The bounds, constraints, and default values for type ' - 'parameters\n' - ' (lazily evaluated).\n' - '\n' - '* The value of type aliases (lazily evaluated).\n' - '\n' - 'Annotation scopes differ from function scopes in the following ' - 'ways:\n' - '\n' - '* Annotation scopes have access to their enclosing class ' - 'namespace. If\n' - ' an annotation scope is immediately within a class scope, or ' - 'within\n' - ' another annotation scope that is immediately within a class ' - 'scope,\n' - ' the code in the annotation scope can use names defined in the ' - 'class\n' - ' scope as if it were executed directly within the class body. ' - 'This\n' - ' contrasts with regular functions defined within classes, ' - 'which\n' - ' cannot access names defined in the class scope.\n' - '\n' - '* Expressions in annotation scopes cannot contain "yield", ' - '"yield\n' - ' from", "await", or ":=" expressions. (These expressions are ' - 'allowed\n' - ' in other scopes contained within the annotation scope.)\n' - '\n' - '* Names defined in annotation scopes cannot be rebound with ' - '"nonlocal"\n' - ' statements in inner scopes. This includes only type ' - 'parameters, as\n' - ' no other syntactic elements that can appear within annotation ' - 'scopes\n' - ' can introduce new names.\n' - '\n' - '* While annotation scopes have an internal name, that name is ' - 'not\n' - ' reflected in the *qualified name* of objects defined within ' - 'the\n' - ' scope. 
Instead, the "__qualname__" of such objects is as if ' - 'the\n' - ' object were defined in the enclosing scope.\n' - '\n' - 'Added in version 3.12: Annotation scopes were introduced in ' - 'Python\n' - '3.12 as part of **PEP 695**.\n' - '\n' - 'Changed in version 3.13: Annotation scopes are also used for ' - 'type\n' - 'parameter defaults, as introduced by **PEP 696**.\n' - '\n' - 'Changed in version 3.14: Annotation scopes are now also used ' - 'for\n' - 'annotations, as specified in **PEP 649** and **PEP 749**.\n' - '\n' - '\n' - 'Lazy evaluation\n' - '---------------\n' - '\n' - 'Most annotation scopes are *lazily evaluated*. This includes\n' - 'annotations, the values of type aliases created through the ' - '"type"\n' - 'statement, and the bounds, constraints, and default values of ' - 'type\n' - 'variables created through the type parameter syntax. This means ' - 'that\n' - 'they are not evaluated when the type alias or type variable is\n' - 'created, or when the object carrying annotations is created. ' - 'Instead,\n' - 'they are only evaluated when necessary, for example when the\n' - '"__value__" attribute on a type alias is accessed.\n' - '\n' - 'Example:\n' - '\n' - ' >>> type Alias = 1/0\n' - ' >>> Alias.__value__\n' - ' Traceback (most recent call last):\n' - ' ...\n' - ' ZeroDivisionError: division by zero\n' - ' >>> def func[T: 1/0](): pass\n' - ' >>> T = func.__type_params__[0]\n' - ' >>> T.__bound__\n' - ' Traceback (most recent call last):\n' - ' ...\n' - ' ZeroDivisionError: division by zero\n' - '\n' - 'Here the exception is raised only when the "__value__" ' - 'attribute of\n' - 'the type alias or the "__bound__" attribute of the type ' - 'variable is\n' - 'accessed.\n' - '\n' - 'This behavior is primarily useful for references to types that ' - 'have\n' - 'not yet been defined when the type alias or type variable is ' - 'created.\n' - 'For example, lazy evaluation enables creation of mutually ' - 'recursive\n' - 'type aliases:\n' - '\n' - ' from typing import Literal\n' - '\n' - ' type SimpleExpr = int | Parenthesized\n' - ' type Parenthesized = tuple[Literal["("], Expr, ' - 'Literal[")"]]\n' - ' type Expr = SimpleExpr | tuple[SimpleExpr, Literal["+", ' - '"-"], Expr]\n' - '\n' - 'Lazily evaluated values are evaluated in annotation scope, ' - 'which means\n' - 'that names that appear inside the lazily evaluated value are ' - 'looked up\n' - 'as if they were used in the immediately enclosing scope.\n' - '\n' - 'Added in version 3.12.\n' - '\n' - '\n' - 'Builtins and restricted execution\n' - '---------------------------------\n' - '\n' - '**CPython implementation detail:** Users should not touch\n' - '"__builtins__"; it is strictly an implementation detail. ' - 'Users\n' - 'wanting to override values in the builtins namespace should ' - '"import"\n' - 'the "builtins" module and modify its attributes appropriately.\n' - '\n' - 'The builtins namespace associated with the execution of a code ' - 'block\n' - 'is actually found by looking up the name "__builtins__" in its ' - 'global\n' - 'namespace; this should be a dictionary or a module (in the ' - 'latter case\n' - 'the module’s dictionary is used). 
By default, when in the ' - '"__main__"\n' - 'module, "__builtins__" is the built-in module "builtins"; when ' - 'in any\n' - 'other module, "__builtins__" is an alias for the dictionary of ' - 'the\n' - '"builtins" module itself.\n' - '\n' - '\n' - 'Interaction with dynamic features\n' - '---------------------------------\n' - '\n' - 'Name resolution of free variables occurs at runtime, not at ' - 'compile\n' - 'time. This means that the following code will print 42:\n' - '\n' - ' i = 10\n' - ' def f():\n' - ' print(i)\n' - ' i = 42\n' - ' f()\n' - '\n' - 'The "eval()" and "exec()" functions do not have access to the ' - 'full\n' - 'environment for resolving names. Names may be resolved in the ' - 'local\n' - 'and global namespaces of the caller. Free variables are not ' - 'resolved\n' - 'in the nearest enclosing namespace, but in the global ' - 'namespace. [1]\n' - 'The "exec()" and "eval()" functions have optional arguments to\n' - 'override the global and local namespace. If only one namespace ' - 'is\n' - 'specified, it is used for both.\n' - '\n' - '\n' - 'Exceptions\n' - '==========\n' - '\n' - 'Exceptions are a means of breaking out of the normal flow of ' - 'control\n' - 'of a code block in order to handle errors or other exceptional\n' - 'conditions. An exception is *raised* at the point where the ' - 'error is\n' - 'detected; it may be *handled* by the surrounding code block or ' - 'by any\n' - 'code block that directly or indirectly invoked the code block ' - 'where\n' - 'the error occurred.\n' - '\n' - 'The Python interpreter raises an exception when it detects a ' - 'run-time\n' - 'error (such as division by zero). A Python program can also\n' - 'explicitly raise an exception with the "raise" statement. ' - 'Exception\n' - 'handlers are specified with the "try" … "except" statement. ' - 'The\n' - '"finally" clause of such a statement can be used to specify ' - 'cleanup\n' - 'code which does not handle the exception, but is executed ' - 'whether an\n' - 'exception occurred or not in the preceding code.\n' - '\n' - 'Python uses the “termination†model of error handling: an ' - 'exception\n' - 'handler can find out what happened and continue execution at an ' - 'outer\n' - 'level, but it cannot repair the cause of the error and retry ' - 'the\n' - 'failing operation (except by re-entering the offending piece of ' - 'code\n' - 'from the top).\n' - '\n' - 'When an exception is not handled at all, the interpreter ' - 'terminates\n' - 'execution of the program, or returns to its interactive main ' - 'loop. In\n' - 'either case, it prints a stack traceback, except when the ' - 'exception is\n' - '"SystemExit".\n' - '\n' - 'Exceptions are identified by class instances. The "except" ' - 'clause is\n' - 'selected depending on the class of the instance: it must ' - 'reference the\n' - 'class of the instance or a *non-virtual base class* thereof. ' - 'The\n' - 'instance can be received by the handler and can carry ' - 'additional\n' - 'information about the exceptional condition.\n' - '\n' - 'Note:\n' - '\n' - ' Exception messages are not part of the Python API. 
Their ' - 'contents\n' - ' may change from one version of Python to the next without ' - 'warning\n' - ' and should not be relied on by code which will run under ' - 'multiple\n' - ' versions of the interpreter.\n' - '\n' - 'See also the description of the "try" statement in section The ' - 'try\n' - 'statement and "raise" statement in section The raise ' - 'statement.\n' - '\n' - '-[ Footnotes ]-\n' - '\n' - '[1] This limitation occurs because the code that is executed by ' - 'these\n' - ' operations is not available at the time the module is ' - 'compiled.\n', - 'exprlists': 'Expression lists\n' - '****************\n' - '\n' - ' starred_expression ::= ["*"] or_expr\n' - ' flexible_expression ::= assignment_expression | ' - 'starred_expression\n' - ' flexible_expression_list ::= flexible_expression ("," ' - 'flexible_expression)* [","]\n' - ' starred_expression_list ::= starred_expression ("," ' - 'starred_expression)* [","]\n' - ' expression_list ::= expression ("," expression)* ' - '[","]\n' - ' yield_list ::= expression_list | ' - 'starred_expression "," [starred_expression_list]\n' - '\n' - 'Except when part of a list or set display, an expression list\n' - 'containing at least one comma yields a tuple. The length of ' - 'the tuple\n' - 'is the number of expressions in the list. The expressions are\n' - 'evaluated from left to right.\n' - '\n' - 'An asterisk "*" denotes *iterable unpacking*. Its operand must ' - 'be an\n' - '*iterable*. The iterable is expanded into a sequence of items, ' - 'which\n' - 'are included in the new tuple, list, or set, at the site of ' - 'the\n' - 'unpacking.\n' - '\n' - 'Added in version 3.5: Iterable unpacking in expression lists,\n' - 'originally proposed by **PEP 448**.\n' - '\n' - 'Added in version 3.11: Any item in an expression list may be ' - 'starred.\n' - 'See **PEP 646**.\n' - '\n' - 'A trailing comma is required only to create a one-item tuple, ' - 'such as\n' - '"1,"; it is optional in all other cases. A single expression ' - 'without a\n' - 'trailing comma doesn’t create a tuple, but rather yields the ' - 'value of\n' - 'that expression. (To create an empty tuple, use an empty pair ' - 'of\n' - 'parentheses: "()".)\n', - 'floating': 'Floating-point literals\n' - '***********************\n' - '\n' - 'Floating-point literals are described by the following lexical\n' - 'definitions:\n' - '\n' - ' floatnumber ::= pointfloat | exponentfloat\n' - ' pointfloat ::= [digitpart] fraction | digitpart "."\n' - ' exponentfloat ::= (digitpart | pointfloat) exponent\n' - ' digitpart ::= digit (["_"] digit)*\n' - ' fraction ::= "." digitpart\n' - ' exponent ::= ("e" | "E") ["+" | "-"] digitpart\n' - '\n' - 'Note that the integer and exponent parts are always interpreted ' - 'using\n' - 'radix 10. For example, "077e010" is legal, and denotes the same ' - 'number\n' - 'as "77e10". The allowed range of floating-point literals is\n' - 'implementation-dependent. As in integer literals, underscores ' - 'are\n' - 'supported for digit grouping.\n' - '\n' - 'Some examples of floating-point literals:\n' - '\n' - ' 3.14 10. 
.001 1e100 3.14e-10 0e0 ' - '3.14_15_93\n' - '\n' - 'Changed in version 3.6: Underscores are now allowed for ' - 'grouping\n' - 'purposes in literals.\n', - 'for': 'The "for" statement\n' - '*******************\n' - '\n' - 'The "for" statement is used to iterate over the elements of a ' - 'sequence\n' - '(such as a string, tuple or list) or other iterable object:\n' - '\n' - ' for_stmt ::= "for" target_list "in" starred_list ":" suite\n' - ' ["else" ":" suite]\n' - '\n' - 'The "starred_list" expression is evaluated once; it should yield an\n' - '*iterable* object. An *iterator* is created for that iterable. The\n' - 'first item provided by the iterator is then assigned to the target\n' - 'list using the standard rules for assignments (see Assignment\n' - 'statements), and the suite is executed. This repeats for each item\n' - 'provided by the iterator. When the iterator is exhausted, the suite\n' - 'in the "else" clause, if present, is executed, and the loop\n' - 'terminates.\n' - '\n' - 'A "break" statement executed in the first suite terminates the loop\n' - 'without executing the "else" clause’s suite. A "continue" statement\n' - 'executed in the first suite skips the rest of the suite and ' - 'continues\n' - 'with the next item, or with the "else" clause if there is no next\n' - 'item.\n' - '\n' - 'The for-loop makes assignments to the variables in the target list.\n' - 'This overwrites all previous assignments to those variables ' - 'including\n' - 'those made in the suite of the for-loop:\n' - '\n' - ' for i in range(10):\n' - ' print(i)\n' - ' i = 5 # this will not affect the for-loop\n' - ' # because i will be overwritten with the ' - 'next\n' - ' # index in the range\n' - '\n' - 'Names in the target list are not deleted when the loop is finished,\n' - 'but if the sequence is empty, they will not have been assigned to at\n' - 'all by the loop. Hint: the built-in type "range()" represents\n' - 'immutable arithmetic sequences of integers. For instance, iterating\n' - '"range(3)" successively yields 0, 1, and then 2.\n' - '\n' - 'Changed in version 3.11: Starred elements are now allowed in the\n' - 'expression list.\n', - 'formatstrings': 'Format String Syntax\n' - '********************\n' - '\n' - 'The "str.format()" method and the "Formatter" class share ' - 'the same\n' - 'syntax for format strings (although in the case of ' - '"Formatter",\n' - 'subclasses can define their own format string syntax). The ' - 'syntax is\n' - 'related to that of formatted string literals, but it is ' - 'less\n' - 'sophisticated and, in particular, does not support ' - 'arbitrary\n' - 'expressions.\n' - '\n' - 'Format strings contain “replacement fields” surrounded by ' - 'curly braces\n' - '"{}". Anything that is not contained in braces is ' - 'considered literal\n' - 'text, which is copied unchanged to the output. If you need ' - 'to include\n' - 'a brace character in the literal text, it can be escaped by ' - 'doubling:\n' - '"{{" and "}}".\n' - '\n' - 'The grammar for a replacement field is as follows:\n' - '\n' - ' replacement_field ::= "{" [field_name] ["!" conversion] ' - '[":" format_spec] "}"\n' - ' field_name ::= arg_name ("." 
attribute_name | "[" ' - 'element_index "]")*\n' - ' arg_name ::= [identifier | digit+]\n' - ' attribute_name ::= identifier\n' - ' element_index ::= digit+ | index_string\n' - ' index_string ::= ' - '+\n' - ' conversion ::= "r" | "s" | "a"\n' - ' format_spec ::= format-spec:format_spec\n' - '\n' - 'In less formal terms, the replacement field can start with ' - 'a\n' - '*field_name* that specifies the object whose value is to be ' - 'formatted\n' - 'and inserted into the output instead of the replacement ' - 'field. The\n' - '*field_name* is optionally followed by a *conversion* ' - 'field, which is\n' - 'preceded by an exclamation point "\'!\'", and a ' - '*format_spec*, which is\n' - 'preceded by a colon "\':\'". These specify a non-default ' - 'format for the\n' - 'replacement value.\n' - '\n' - 'See also the Format Specification Mini-Language section.\n' - '\n' - 'The *field_name* itself begins with an *arg_name* that is ' - 'either a\n' - 'number or a keyword. If it’s a number, it refers to a ' - 'positional\n' - 'argument, and if it’s a keyword, it refers to a named ' - 'keyword\n' - 'argument. An *arg_name* is treated as a number if a call ' - 'to\n' - '"str.isdecimal()" on the string would return true. If the ' - 'numerical\n' - 'arg_names in a format string are 0, 1, 2, … in sequence, ' - 'they can all\n' - 'be omitted (not just some) and the numbers 0, 1, 2, … will ' - 'be\n' - 'automatically inserted in that order. Because *arg_name* is ' - 'not quote-\n' - 'delimited, it is not possible to specify arbitrary ' - 'dictionary keys\n' - '(e.g., the strings "\'10\'" or "\':-]\'") within a format ' - 'string. The\n' - '*arg_name* can be followed by any number of index or ' - 'attribute\n' - 'expressions. An expression of the form "\'.name\'" selects ' - 'the named\n' - 'attribute using "getattr()", while an expression of the ' - 'form\n' - '"\'[index]\'" does an index lookup using "__getitem__()".\n' - '\n' - 'Changed in version 3.1: The positional argument specifiers ' - 'can be\n' - 'omitted for "str.format()", so "\'{} {}\'.format(a, b)" is ' - 'equivalent to\n' - '"\'{0} {1}\'.format(a, b)".\n' - '\n' - 'Changed in version 3.4: The positional argument specifiers ' - 'can be\n' - 'omitted for "Formatter".\n' - '\n' - 'Some simple format string examples:\n' - '\n' - ' "First, thou shalt count to {0}" # References first ' - 'positional argument\n' - ' "Bring me a {}" # Implicitly ' - 'references the first positional argument\n' - ' "From {} to {}" # Same as "From {0} to ' - '{1}"\n' - ' "My quest is {name}" # References keyword ' - "argument 'name'\n" - ' "Weight in tons {0.weight}" # \'weight\' attribute ' - 'of first positional arg\n' - ' "Units destroyed: {players[0]}" # First element of ' - "keyword argument 'players'.\n" - '\n' - 'The *conversion* field causes a type coercion before ' - 'formatting.\n' - 'Normally, the job of formatting a value is done by the ' - '"__format__()"\n' - 'method of the value itself. However, in some cases it is ' - 'desirable to\n' - 'force a type to be formatted as a string, overriding its ' - 'own\n' - 'definition of formatting. 
By converting the value to a ' - 'string before\n' - 'calling "__format__()", the normal formatting logic is ' - 'bypassed.\n' - '\n' - 'Three conversion flags are currently supported: "\'!s\'" ' - 'which calls\n' - '"str()" on the value, "\'!r\'" which calls "repr()" and ' - '"\'!a\'" which\n' - 'calls "ascii()".\n' - '\n' - 'Some examples:\n' - '\n' - ' "Harold\'s a clever {0!s}" # Calls str() on the ' - 'argument first\n' - ' "Bring out the holy {name!r}" # Calls repr() on the ' - 'argument first\n' - ' "More {!a}" # Calls ascii() on the ' - 'argument first\n' - '\n' - 'The *format_spec* field contains a specification of how the ' - 'value\n' - 'should be presented, including such details as field width, ' - 'alignment,\n' - 'padding, decimal precision and so on. Each value type can ' - 'define its\n' - 'own “formatting mini-language” or interpretation of the ' - '*format_spec*.\n' - '\n' - 'Most built-in types support a common formatting ' - 'mini-language, which\n' - 'is described in the next section.\n' - '\n' - 'A *format_spec* field can also include nested replacement ' - 'fields\n' - 'within it. These nested replacement fields may contain a ' - 'field name,\n' - 'conversion flag and format specification, but deeper ' - 'nesting is not\n' - 'allowed. The replacement fields within the format_spec ' - 'are\n' - 'substituted before the *format_spec* string is interpreted. ' - 'This\n' - 'allows the formatting of a value to be dynamically ' - 'specified.\n' - '\n' - 'See the Format examples section for some examples.\n' - '\n' - '\n' - 'Format Specification Mini-Language\n' - '==================================\n' - '\n' - '“Format specifications” are used within replacement fields ' - 'contained\n' - 'within a format string to define how individual values are ' - 'presented\n' - '(see Format String Syntax and f-strings). They can also be ' - 'passed\n' - 'directly to the built-in "format()" function. Each ' - 'formattable type\n' - 'may define how the format specification is to be ' - 'interpreted.\n' - '\n' - 'Most built-in types implement the following options for ' - 'format\n' - 'specifications, although some of the formatting options are ' - 'only\n' - 'supported by the numeric types.\n' - '\n' - 'A general convention is that an empty format specification ' - 'produces\n' - 'the same result as if you had called "str()" on the value. ' - 'A non-empty\n' - 'format specification typically modifies the result.\n' - '\n' - 'The general form of a *standard format specifier* is:\n' - '\n' - ' format_spec ::= ' - '[[fill]align][sign]["z"]["#"]["0"][width][grouping_option]["." ' - 'precision][type]\n' - ' fill ::= <any character>\n' - ' align ::= "<" | ">" | "=" | "^"\n' - ' sign ::= "+" | "-" | " "\n' - ' width ::= digit+\n' - ' grouping_option ::= "_" | ","\n' - ' precision ::= digit+\n' - ' type ::= "b" | "c" | "d" | "e" | "E" | "f" | ' - '"F" | "g" | "G" | "n" | "o" | "s" | "x" | "X" | "%"\n' - '\n' - 'If a valid *align* value is specified, it can be preceded ' - 'by a *fill*\n' - 'character that can be any character and defaults to a space ' - 'if\n' - 'omitted. It is not possible to use a literal curly brace ' - '(“"{"” or\n' - '“"}"”) as the *fill* character in a formatted string ' - 'literal or when\n' - 'using the "str.format()" method. However, it is possible ' - 'to insert a\n' - 'curly brace with a nested replacement field. 
This ' - 'limitation doesn’t\n' - 'affect the "format()" function.\n' - '\n' - 'The meaning of the various alignment options is as ' - 'follows:\n' - '\n' - '+-----------+------------------------------------------------------------+\n' - '| Option | ' - 'Meaning ' - '|\n' - '|===========|============================================================|\n' - '| "\'<\'" | Forces the field to be left-aligned within ' - 'the available |\n' - '| | space (this is the default for most ' - 'objects). |\n' - '+-----------+------------------------------------------------------------+\n' - '| "\'>\'" | Forces the field to be right-aligned within ' - 'the available |\n' - '| | space (this is the default for ' - 'numbers). |\n' - '+-----------+------------------------------------------------------------+\n' - '| "\'=\'" | Forces the padding to be placed after the ' - 'sign (if any) |\n' - '| | but before the digits. This is used for ' - 'printing fields |\n' - '| | in the form ‘+000000120’. This alignment ' - 'option is only |\n' - '| | valid for numeric types, excluding "complex". ' - 'It becomes |\n' - '| | the default for numbers when ‘0’ immediately ' - 'precedes the |\n' - '| | field ' - 'width. |\n' - '+-----------+------------------------------------------------------------+\n' - '| "\'^\'" | Forces the field to be centered within the ' - 'available |\n' - '| | ' - 'space. ' - '|\n' - '+-----------+------------------------------------------------------------+\n' - '\n' - 'Note that unless a minimum field width is defined, the ' - 'field width\n' - 'will always be the same size as the data to fill it, so ' - 'that the\n' - 'alignment option has no meaning in this case.\n' - '\n' - 'The *sign* option is only valid for number types, and can ' - 'be one of\n' - 'the following:\n' - '\n' - '+-----------+------------------------------------------------------------+\n' - '| Option | ' - 'Meaning ' - '|\n' - '|===========|============================================================|\n' - '| "\'+\'" | indicates that a sign should be used for ' - 'both positive as |\n' - '| | well as negative ' - 'numbers. |\n' - '+-----------+------------------------------------------------------------+\n' - '| "\'-\'" | indicates that a sign should be used only ' - 'for negative |\n' - '| | numbers (this is the default ' - 'behavior). |\n' - '+-----------+------------------------------------------------------------+\n' - '| space | indicates that a leading space should be used ' - 'on positive |\n' - '| | numbers, and a minus sign on negative ' - 'numbers. |\n' - '+-----------+------------------------------------------------------------+\n' - '\n' - 'The "\'z\'" option coerces negative zero floating-point ' - 'values to\n' - 'positive zero after rounding to the format precision. This ' - 'option is\n' - 'only valid for floating-point presentation types.\n' - '\n' - 'Changed in version 3.11: Added the "\'z\'" option (see also ' - '**PEP\n' - '682**).\n' - '\n' - 'The "\'#\'" option causes the “alternate form†to be used ' - 'for the\n' - 'conversion. The alternate form is defined differently for ' - 'different\n' - 'types. This option is only valid for integer, float and ' - 'complex\n' - 'types. For integers, when binary, octal, or hexadecimal ' - 'output is\n' - 'used, this option adds the respective prefix "\'0b\'", ' - '"\'0o\'", "\'0x\'",\n' - 'or "\'0X\'" to the output value. 
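Two quick illustrations of the options just covered, using the built-in "format()":

   >>> format(255, '#x'), format(255, '#b')
   ('0xff', '0b11111111')
   >>> format(-0.001, 'z.1f')   # negative zero is coerced to positive zero
   '0.0'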
For float and complex the ' - 'alternate\n' - 'form causes the result of the conversion to always contain ' - 'a decimal-\n' - 'point character, even if no digits follow it. Normally, a ' - 'decimal-\n' - 'point character appears in the result of these conversions ' - 'only if a\n' - 'digit follows it. In addition, for "\'g\'" and "\'G\'" ' - 'conversions,\n' - 'trailing zeros are not removed from the result.\n' - '\n' - 'The "\',\'" option signals the use of a comma for a ' - 'thousands separator\n' - 'for floating-point presentation types and for integer ' - 'presentation\n' - 'type "\'d\'". For other presentation types, this option is ' - 'an error. For\n' - 'a locale aware separator, use the "\'n\'" integer ' - 'presentation type\n' - 'instead.\n' - '\n' - 'Changed in version 3.1: Added the "\',\'" option (see also ' - '**PEP 378**).\n' - '\n' - 'The "\'_\'" option signals the use of an underscore for a ' - 'thousands\n' - 'separator for floating-point presentation types and for ' - 'integer\n' - 'presentation type "\'d\'". For integer presentation types ' - '"\'b\'", "\'o\'",\n' - '"\'x\'", and "\'X\'", underscores will be inserted every 4 ' - 'digits. For\n' - 'other presentation types, specifying this option is an ' - 'error.\n' - '\n' - 'Changed in version 3.6: Added the "\'_\'" option (see also ' - '**PEP 515**).\n' - '\n' - '*width* is a decimal integer defining the minimum total ' - 'field width,\n' - 'including any prefixes, separators, and other formatting ' - 'characters.\n' - 'If not specified, then the field width will be determined ' - 'by the\n' - 'content.\n' - '\n' - 'When no explicit alignment is given, preceding the *width* ' - 'field by a\n' - 'zero ("\'0\'") character enables sign-aware zero-padding ' - 'for numeric\n' - 'types, excluding "complex". This is equivalent to a *fill* ' - 'character\n' - 'of "\'0\'" with an *alignment* type of "\'=\'".\n' - '\n' - 'Changed in version 3.10: Preceding the *width* field by ' - '"\'0\'" no\n' - 'longer affects the default alignment for strings.\n' - '\n' - 'The *precision* is a decimal integer indicating how many ' - 'digits should\n' - 'be displayed after the decimal point for presentation types ' - '"\'f\'" and\n' - '"\'F\'", or before and after the decimal point for ' - 'presentation types\n' - '"\'g\'" or "\'G\'". For string presentation types the ' - 'field indicates the\n' - 'maximum field size - in other words, how many characters ' - 'will be used\n' - 'from the field content. The *precision* is not allowed for ' - 'integer\n' - 'presentation types.\n' - '\n' - 'Finally, the *type* determines how the data should be ' - 'presented.\n' - '\n' - 'The available string presentation types are:\n' - '\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | Type | ' - 'Meaning ' - '|\n' - ' ' - '|===========|============================================================|\n' - ' | "\'s\'" | String format. This is the default type ' - 'for strings and |\n' - ' | | may be ' - 'omitted. |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | None | The same as ' - '"\'s\'". 
|\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - '\n' - 'The available integer presentation types are:\n' - '\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | Type | ' - 'Meaning ' - '|\n' - ' ' - '|===========|============================================================|\n' - ' | "\'b\'" | Binary format. Outputs the number in ' - 'base 2. |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | "\'c\'" | Character. Converts the integer to the ' - 'corresponding |\n' - ' | | unicode character before ' - 'printing. |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | "\'d\'" | Decimal Integer. Outputs the number in ' - 'base 10. |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | "\'o\'" | Octal format. Outputs the number in base ' - '8. |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | "\'x\'" | Hex format. Outputs the number in base ' - '16, using lower- |\n' - ' | | case letters for the digits above ' - '9. |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | "\'X\'" | Hex format. Outputs the number in base ' - '16, using upper- |\n' - ' | | case letters for the digits above 9. In ' - 'case "\'#\'" is |\n' - ' | | specified, the prefix "\'0x\'" will be ' - 'upper-cased to "\'0X\'" |\n' - ' | | as ' - 'well. |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | "\'n\'" | Number. This is the same as "\'d\'", ' - 'except that it uses the |\n' - ' | | current locale setting to insert the ' - 'appropriate number |\n' - ' | | separator ' - 'characters. |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | None | The same as ' - '"\'d\'". |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - '\n' - 'In addition to the above presentation types, integers can ' - 'be formatted\n' - 'with the floating-point presentation types listed below ' - '(except "\'n\'"\n' - 'and "None"). When doing so, "float()" is used to convert ' - 'the integer\n' - 'to a floating-point number before formatting.\n' - '\n' - 'The available presentation types for "float" and "Decimal" ' - 'values are:\n' - '\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | Type | ' - 'Meaning ' - '|\n' - ' ' - '|===========|============================================================|\n' - ' | "\'e\'" | Scientific notation. For a given ' - 'precision "p", formats |\n' - ' | | the number in scientific notation with the ' - 'letter ‘e’ |\n' - ' | | separating the coefficient from the ' - 'exponent. The |\n' - ' | | coefficient has one digit before and "p" ' - 'digits after the |\n' - ' | | decimal point, for a total of "p + 1" ' - 'significant digits. |\n' - ' | | With no precision given, uses a precision ' - 'of "6" digits |\n' - ' | | after the decimal point for "float", and ' - 'shows all |\n' - ' | | coefficient digits for "Decimal". If ' - '"p=0", the decimal |\n' - ' | | point is omitted unless the "#" option is ' - 'used. |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | "\'E\'" | Scientific notation. Same as "\'e\'" ' - 'except it uses an upper |\n' - ' | | case ‘E’ as the separator ' - 'character. 
|\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | "\'f\'" | Fixed-point notation. For a given ' - 'precision "p", formats |\n' - ' | | the number as a decimal number with ' - 'exactly "p" digits |\n' - ' | | following the decimal point. With no ' - 'precision given, uses |\n' - ' | | a precision of "6" digits after the ' - 'decimal point for |\n' - ' | | "float", and uses a precision large enough ' - 'to show all |\n' - ' | | coefficient digits for "Decimal". If ' - '"p=0", the decimal |\n' - ' | | point is omitted unless the "#" option is ' - 'used. |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | "\'F\'" | Fixed-point notation. Same as "\'f\'", ' - 'but converts "nan" to |\n' - ' | | "NAN" and "inf" to ' - '"INF". |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | "\'g\'" | General format. For a given precision ' - '"p >= 1", this |\n' - ' | | rounds the number to "p" significant ' - 'digits and then |\n' - ' | | formats the result in either fixed-point ' - 'format or in |\n' - ' | | scientific notation, depending on its ' - 'magnitude. A |\n' - ' | | precision of "0" is treated as equivalent ' - 'to a precision |\n' - ' | | of "1". The precise rules are as follows: ' - 'suppose that |\n' - ' | | the result formatted with presentation ' - 'type "\'e\'" and |\n' - ' | | precision "p-1" would have exponent ' - '"exp". Then, if "m <= |\n' - ' | | exp < p", where "m" is -4 for floats and ' - '-6 for |\n' - ' | | "Decimals", the number is formatted with ' - 'presentation type |\n' - ' | | "\'f\'" and precision "p-1-exp". ' - 'Otherwise, the number is |\n' - ' | | formatted with presentation type "\'e\'" ' - 'and precision |\n' - ' | | "p-1". In both cases insignificant ' - 'trailing zeros are |\n' - ' | | removed from the significand, and the ' - 'decimal point is |\n' - ' | | also removed if there are no remaining ' - 'digits following |\n' - ' | | it, unless the "\'#\'" option is used. ' - 'With no precision |\n' - ' | | given, uses a precision of "6" significant ' - 'digits for |\n' - ' | | "float". For "Decimal", the coefficient of ' - 'the result is |\n' - ' | | formed from the coefficient digits of the ' - 'value; |\n' - ' | | scientific notation is used for values ' - 'smaller than "1e-6" |\n' - ' | | in absolute value and values where the ' - 'place value of the |\n' - ' | | least significant digit is larger than 1, ' - 'and fixed-point |\n' - ' | | notation is used otherwise. Positive and ' - 'negative |\n' - ' | | infinity, positive and negative zero, and ' - 'nans, are |\n' - ' | | formatted as "inf", "-inf", "0", "-0" and ' - '"nan" |\n' - ' | | respectively, regardless of the ' - 'precision. |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | "\'G\'" | General format. Same as "\'g\'" except ' - 'switches to "\'E\'" if |\n' - ' | | the number gets too large. The ' - 'representations of infinity |\n' - ' | | and NaN are uppercased, ' - 'too. |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | "\'n\'" | Number. This is the same as "\'g\'", ' - 'except that it uses the |\n' - ' | | current locale setting to insert the ' - 'appropriate number |\n' - ' | | separator ' - 'characters. |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | "\'%\'" | Percentage. 
Multiplies the number by 100 ' - 'and displays in |\n' - ' | | fixed ("\'f\'") format, followed by a ' - 'percent sign. |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | None | For "float" this is like the "\'g\'" type, ' - 'except that when |\n' - ' | | fixed- point notation is used to format ' - 'the result, it |\n' - ' | | always includes at least one digit past ' - 'the decimal point, |\n' - ' | | and switches to the scientific notation ' - 'when "exp >= p - |\n' - ' | | 1". When the precision is not specified, ' - 'the latter will |\n' - ' | | be as large as needed to represent the ' - 'given value |\n' - ' | | faithfully. For "Decimal", this is the ' - 'same as either |\n' - ' | | "\'g\'" or "\'G\'" depending on the value ' - 'of |\n' - ' | | "context.capitals" for the current decimal ' - 'context. The |\n' - ' | | overall effect is to match the output of ' - '"str()" as |\n' - ' | | altered by the other format ' - 'modifiers. |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - '\n' - 'The result should be correctly rounded to a given precision ' - '"p" of\n' - 'digits after the decimal point. The rounding mode for ' - '"float" matches\n' - 'that of the "round()" builtin. For "Decimal", the rounding ' - 'mode of\n' - 'the current context will be used.\n' - '\n' - 'The available presentation types for "complex" are the same ' - 'as those\n' - 'for "float" ("\'%\'" is not allowed). Both the real and ' - 'imaginary\n' - 'components of a complex number are formatted as ' - 'floating-point\n' - 'numbers, according to the specified presentation type. ' - 'They are\n' - 'separated by the mandatory sign of the imaginary part, the ' - 'latter\n' - 'being terminated by a "j" suffix. If the presentation type ' - 'is\n' - 'missing, the result will match the output of "str()" ' - '(complex numbers\n' - 'with a non-zero real part are also surrounded by ' - 'parentheses),\n' - 'possibly altered by other format modifiers.\n' - '\n' - '\n' - 'Format examples\n' - '===============\n' - '\n' - 'This section contains examples of the "str.format()" syntax ' - 'and\n' - 'comparison with the old "%"-formatting.\n' - '\n' - 'In most of the cases the syntax is similar to the old ' - '"%"-formatting,\n' - 'with the addition of the "{}" and with ":" used instead of ' - '"%". 
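Before the worked examples, a small sketch of the "complex" behaviour described above:

   >>> format(3-5j, '.2f')
   '3.00-5.00j'
   >>> format(3-5j, '')        # no presentation type: matches str()
   '(3-5j)'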
For\n' - 'example, "\'%03.2f\'" can be translated to "\'{:03.2f}\'".\n' - '\n' - 'The new format syntax also supports new and different ' - 'options, shown\n' - 'in the following examples.\n' - '\n' - 'Accessing arguments by position:\n' - '\n' - " >>> '{0}, {1}, {2}'.format('a', 'b', 'c')\n" - " 'a, b, c'\n" - " >>> '{}, {}, {}'.format('a', 'b', 'c') # 3.1+ only\n" - " 'a, b, c'\n" - " >>> '{2}, {1}, {0}'.format('a', 'b', 'c')\n" - " 'c, b, a'\n" - " >>> '{2}, {1}, {0}'.format(*'abc') # unpacking " - 'argument sequence\n' - " 'c, b, a'\n" - " >>> '{0}{1}{0}'.format('abra', 'cad') # arguments' " - 'indices can be repeated\n' - " 'abracadabra'\n" - '\n' - 'Accessing arguments by name:\n' - '\n' - " >>> 'Coordinates: {latitude}, " - "{longitude}'.format(latitude='37.24N', " - "longitude='-115.81W')\n" - " 'Coordinates: 37.24N, -115.81W'\n" - " >>> coord = {'latitude': '37.24N', 'longitude': " - "'-115.81W'}\n" - " >>> 'Coordinates: {latitude}, " - "{longitude}'.format(**coord)\n" - " 'Coordinates: 37.24N, -115.81W'\n" - '\n' - 'Accessing arguments’ attributes:\n' - '\n' - ' >>> c = 3-5j\n' - " >>> ('The complex number {0} is formed from the real " - "part {0.real} '\n" - " ... 'and the imaginary part {0.imag}.').format(c)\n" - " 'The complex number (3-5j) is formed from the real part " - "3.0 and the imaginary part -5.0.'\n" - ' >>> class Point:\n' - ' ... def __init__(self, x, y):\n' - ' ... self.x, self.y = x, y\n' - ' ... def __str__(self):\n' - " ... return 'Point({self.x}, " - "{self.y})'.format(self=self)\n" - ' ...\n' - ' >>> str(Point(4, 2))\n' - " 'Point(4, 2)'\n" - '\n' - 'Accessing arguments’ items:\n' - '\n' - ' >>> coord = (3, 5)\n' - " >>> 'X: {0[0]}; Y: {0[1]}'.format(coord)\n" - " 'X: 3; Y: 5'\n" - '\n' - 'Replacing "%s" and "%r":\n' - '\n' - ' >>> "repr() shows quotes: {!r}; str() doesn\'t: ' - '{!s}".format(\'test1\', \'test2\')\n' - ' "repr() shows quotes: \'test1\'; str() doesn\'t: test2"\n' - '\n' - 'Aligning the text and specifying a width:\n' - '\n' - " >>> '{:<30}'.format('left aligned')\n" - " 'left aligned '\n" - " >>> '{:>30}'.format('right aligned')\n" - " ' right aligned'\n" - " >>> '{:^30}'.format('centered')\n" - " ' centered '\n" - " >>> '{:*^30}'.format('centered') # use '*' as a fill " - 'char\n' - " '***********centered***********'\n" - '\n' - 'Replacing "%+f", "%-f", and "% f" and specifying a sign:\n' - '\n' - " >>> '{:+f}; {:+f}'.format(3.14, -3.14) # show it " - 'always\n' - " '+3.140000; -3.140000'\n" - " >>> '{: f}; {: f}'.format(3.14, -3.14) # show a space " - 'for positive numbers\n' - " ' 3.140000; -3.140000'\n" - " >>> '{:-f}; {:-f}'.format(3.14, -3.14) # show only the " - "minus -- same as '{:f}; {:f}'\n" - " '3.140000; -3.140000'\n" - '\n' - 'Replacing "%x" and "%o" and converting the value to ' - 'different bases:\n' - '\n' - ' >>> # format also supports binary numbers\n' - ' >>> "int: {0:d}; hex: {0:x}; oct: {0:o}; bin: ' - '{0:b}".format(42)\n' - " 'int: 42; hex: 2a; oct: 52; bin: 101010'\n" - ' >>> # with 0x, 0o, or 0b as prefix:\n' - ' >>> "int: {0:d}; hex: {0:#x}; oct: {0:#o}; bin: ' - '{0:#b}".format(42)\n' - " 'int: 42; hex: 0x2a; oct: 0o52; bin: 0b101010'\n" - '\n' - 'Using the comma as a thousands separator:\n' - '\n' - " >>> '{:,}'.format(1234567890)\n" - " '1,234,567,890'\n" - '\n' - 'Expressing a percentage:\n' - '\n' - ' >>> points = 19\n' - ' >>> total = 22\n' - " >>> 'Correct answers: {:.2%}'.format(points/total)\n" - " 'Correct answers: 86.36%'\n" - '\n' - 'Using type-specific formatting:\n' - '\n' - ' >>> import datetime\n' - ' 
>>> d = datetime.datetime(2010, 7, 4, 12, 15, 58)\n' - " >>> '{:%Y-%m-%d %H:%M:%S}'.format(d)\n" - " '2010-07-04 12:15:58'\n" - '\n' - 'Nesting arguments and more complex examples:\n' - '\n' - " >>> for align, text in zip('<^>', ['left', 'center', " - "'right']):\n" - " ... '{0:{fill}{align}16}'.format(text, fill=align, " - 'align=align)\n' - ' ...\n' - " 'left<<<<<<<<<<<<'\n" - " '^^^^^center^^^^^'\n" - " '>>>>>>>>>>>right'\n" - ' >>>\n' - ' >>> octets = [192, 168, 0, 1]\n' - " >>> '{:02X}{:02X}{:02X}{:02X}'.format(*octets)\n" - " 'C0A80001'\n" - ' >>> int(_, 16)\n' - ' 3232235521\n' - ' >>>\n' - ' >>> width = 5\n' - ' >>> for num in range(5,12): \n' - " ... for base in 'dXob':\n" - " ... print('{0:{width}{base}}'.format(num, " - "base=base, width=width), end=' ')\n" - ' ... print()\n' - ' ...\n' - ' 5 5 5 101\n' - ' 6 6 6 110\n' - ' 7 7 7 111\n' - ' 8 8 10 1000\n' - ' 9 9 11 1001\n' - ' 10 A 12 1010\n' - ' 11 B 13 1011\n', - 'function': 'Function definitions\n' - '********************\n' - '\n' - 'A function definition defines a user-defined function object ' - '(see\n' - 'section The standard type hierarchy):\n' - '\n' - ' funcdef ::= [decorators] "def" funcname ' - '[type_params] "(" [parameter_list] ")"\n' - ' ["->" expression] ":" suite\n' - ' decorators ::= decorator+\n' - ' decorator ::= "@" assignment_expression ' - 'NEWLINE\n' - ' parameter_list ::= defparameter ("," ' - 'defparameter)* "," "/" ["," [parameter_list_no_posonly]]\n' - ' | parameter_list_no_posonly\n' - ' parameter_list_no_posonly ::= defparameter ("," ' - 'defparameter)* ["," [parameter_list_starargs]]\n' - ' | parameter_list_starargs\n' - ' parameter_list_starargs ::= "*" [star_parameter] ("," ' - 'defparameter)* ["," ["**" parameter [","]]]\n' - ' | "**" parameter [","]\n' - ' parameter ::= identifier [":" expression]\n' - ' star_parameter ::= identifier [":" ["*"] ' - 'expression]\n' - ' defparameter ::= parameter ["=" expression]\n' - ' funcname ::= identifier\n' - '\n' - 'A function definition is an executable statement. Its execution ' - 'binds\n' - 'the function name in the current local namespace to a function ' - 'object\n' - '(a wrapper around the executable code for the function). This\n' - 'function object contains a reference to the current global ' - 'namespace\n' - 'as the global namespace to be used when the function is called.\n' - '\n' - 'The function definition does not execute the function body; this ' - 'gets\n' - 'executed only when the function is called. [4]\n' - '\n' - 'A function definition may be wrapped by one or more *decorator*\n' - 'expressions. Decorator expressions are evaluated when the ' - 'function is\n' - 'defined, in the scope that contains the function definition. ' - 'The\n' - 'result must be a callable, which is invoked with the function ' - 'object\n' - 'as the only argument. The returned value is bound to the ' - 'function name\n' - 'instead of the function object. Multiple decorators are applied ' - 'in\n' - 'nested fashion. For example, the following code\n' - '\n' - ' @f1(arg)\n' - ' @f2\n' - ' def func(): pass\n' - '\n' - 'is roughly equivalent to\n' - '\n' - ' def func(): pass\n' - ' func = f1(arg)(f2(func))\n' - '\n' - 'except that the original function is not temporarily bound to ' - 'the name\n' - '"func".\n' - '\n' - 'Changed in version 3.9: Functions may be decorated with any ' - 'valid\n' - '"assignment_expression". 
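For illustration (the names "trace" and "decorators" are invented here), a decorator may now be selected by subscription or any other expression:

   >>> def trace(func):
   ...     def wrapper(*args, **kwargs):
   ...         print('calling', func.__name__)
   ...         return func(*args, **kwargs)
   ...     return wrapper
   ...
   >>> decorators = {'trace': trace}
   >>> @decorators['trace']
   ... def greet():
   ...     return 'hi'
   ...
   >>> greet()
   calling greet
   'hi'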
Previously, the grammar was much more\n' - 'restrictive; see **PEP 614** for details.\n' - '\n' - 'A list of type parameters may be given in square brackets ' - 'between the\n' - 'function’s name and the opening parenthesis for its parameter ' - 'list.\n' - 'This indicates to static type checkers that the function is ' - 'generic.\n' - 'At runtime, the type parameters can be retrieved from the ' - 'function’s\n' - '"__type_params__" attribute. See Generic functions for more.\n' - '\n' - 'Changed in version 3.12: Type parameter lists are new in Python ' - '3.12.\n' - '\n' - 'When one or more *parameters* have the form *parameter* "="\n' - '*expression*, the function is said to have “default parameter ' - 'values.â€\n' - 'For a parameter with a default value, the corresponding ' - '*argument* may\n' - 'be omitted from a call, in which case the parameter’s default ' - 'value is\n' - 'substituted. If a parameter has a default value, all following\n' - 'parameters up until the “"*"†must also have a default value — ' - 'this is\n' - 'a syntactic restriction that is not expressed by the grammar.\n' - '\n' - '**Default parameter values are evaluated from left to right when ' - 'the\n' - 'function definition is executed.** This means that the ' - 'expression is\n' - 'evaluated once, when the function is defined, and that the same ' - '“pre-\n' - 'computed†value is used for each call. This is especially ' - 'important\n' - 'to understand when a default parameter value is a mutable ' - 'object, such\n' - 'as a list or a dictionary: if the function modifies the object ' - '(e.g.\n' - 'by appending an item to a list), the default parameter value is ' - 'in\n' - 'effect modified. This is generally not what was intended. A ' - 'way\n' - 'around this is to use "None" as the default, and explicitly test ' - 'for\n' - 'it in the body of the function, e.g.:\n' - '\n' - ' def whats_on_the_telly(penguin=None):\n' - ' if penguin is None:\n' - ' penguin = []\n' - ' penguin.append("property of the zoo")\n' - ' return penguin\n' - '\n' - 'Function call semantics are described in more detail in section ' - 'Calls.\n' - 'A function call always assigns values to all parameters ' - 'mentioned in\n' - 'the parameter list, either from positional arguments, from ' - 'keyword\n' - 'arguments, or from default values. If the form “"*identifier"†' - 'is\n' - 'present, it is initialized to a tuple receiving any excess ' - 'positional\n' - 'parameters, defaulting to the empty tuple. If the form\n' - '“"**identifier"†is present, it is initialized to a new ordered\n' - 'mapping receiving any excess keyword arguments, defaulting to a ' - 'new\n' - 'empty mapping of the same type. Parameters after “"*"†or\n' - '“"*identifier"†are keyword-only parameters and may only be ' - 'passed by\n' - 'keyword arguments. Parameters before “"/"†are positional-only\n' - 'parameters and may only be passed by positional arguments.\n' - '\n' - 'Changed in version 3.8: The "/" function parameter syntax may be ' - 'used\n' - 'to indicate positional-only parameters. See **PEP 570** for ' - 'details.\n' - '\n' - 'Parameters may have an *annotation* of the form “": ' - 'expression"â€\n' - 'following the parameter name. Any parameter may have an ' - 'annotation,\n' - 'even those of the form "*identifier" or "**identifier". (As a ' - 'special\n' - 'case, parameters of the form "*identifier" may have an ' - 'annotation “":\n' - '*expression"â€.) Functions may have “return†annotation of the ' - 'form\n' - '“"-> expression"†after the parameter list. 
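A small sketch of parameter and return annotations (the function is invented for illustration); the annotations do not change how the function executes:

   >>> def scale(x: float, factor: float = 2.0) -> float:
   ...     return x * factor
   ...
   >>> scale(3)
   6.0
   >>> scale.__annotations__
   {'x': <class 'float'>, 'factor': <class 'float'>, 'return': <class 'float'>}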
These annotations ' - 'can be\n' - 'any valid Python expression. The presence of annotations does ' - 'not\n' - 'change the semantics of a function. See Annotations for more\n' - 'information on annotations.\n' - '\n' - 'Changed in version 3.11: Parameters of the form “"*identifier"†' - 'may\n' - 'have an annotation “": *expression"â€. See **PEP 646**.\n' - '\n' - 'It is also possible to create anonymous functions (functions not ' - 'bound\n' - 'to a name), for immediate use in expressions. This uses lambda\n' - 'expressions, described in section Lambdas. Note that the ' - 'lambda\n' - 'expression is merely a shorthand for a simplified function ' - 'definition;\n' - 'a function defined in a “"def"†statement can be passed around ' - 'or\n' - 'assigned to another name just like a function defined by a ' - 'lambda\n' - 'expression. The “"def"†form is actually more powerful since ' - 'it\n' - 'allows the execution of multiple statements and annotations.\n' - '\n' - '**Programmer’s note:** Functions are first-class objects. A ' - '“"def"â€\n' - 'statement executed inside a function definition defines a local\n' - 'function that can be returned or passed around. Free variables ' - 'used\n' - 'in the nested function can access the local variables of the ' - 'function\n' - 'containing the def. See section Naming and binding for ' - 'details.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 3107** - Function Annotations\n' - ' The original specification for function annotations.\n' - '\n' - ' **PEP 484** - Type Hints\n' - ' Definition of a standard meaning for annotations: type ' - 'hints.\n' - '\n' - ' **PEP 526** - Syntax for Variable Annotations\n' - ' Ability to type hint variable declarations, including ' - 'class\n' - ' variables and instance variables.\n' - '\n' - ' **PEP 563** - Postponed Evaluation of Annotations\n' - ' Support for forward references within annotations by ' - 'preserving\n' - ' annotations in a string form at runtime instead of eager\n' - ' evaluation.\n' - '\n' - ' **PEP 318** - Decorators for Functions and Methods\n' - ' Function and method decorators were introduced. Class ' - 'decorators\n' - ' were introduced in **PEP 3129**.\n', - 'global': 'The "global" statement\n' - '**********************\n' - '\n' - ' global_stmt ::= "global" identifier ("," identifier)*\n' - '\n' - 'The "global" statement causes the listed identifiers to be ' - 'interpreted\n' - 'as globals. It would be impossible to assign to a global variable\n' - 'without "global", although free variables may refer to globals ' - 'without\n' - 'being declared global.\n' - '\n' - 'The "global" statement applies to the entire scope of a function ' - 'or\n' - 'class body. A "SyntaxError" is raised if a variable is used or\n' - 'assigned to prior to its global declaration in the scope.\n' - '\n' - '**Programmer’s note:** "global" is a directive to the parser. It\n' - 'applies only to code parsed at the same time as the "global"\n' - 'statement. In particular, a "global" statement contained in a ' - 'string\n' - 'or code object supplied to the built-in "exec()" function does ' - 'not\n' - 'affect the code block *containing* the function call, and code\n' - 'contained in such a string is unaffected by "global" statements in ' - 'the\n' - 'code containing the function call. 
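A minimal illustration of the basic behaviour of the statement itself (names are illustrative):

   >>> counter = 0
   >>> def bump():
   ...     global counter
   ...     counter += 1
   ...
   >>> bump()
   >>> bump()
   >>> counter
   2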
The same applies to the ' - '"eval()"\n' - 'and "compile()" functions.\n', - 'id-classes': 'Reserved classes of identifiers\n' - '*******************************\n' - '\n' - 'Certain classes of identifiers (besides keywords) have ' - 'special\n' - 'meanings. These classes are identified by the patterns of ' - 'leading and\n' - 'trailing underscore characters:\n' - '\n' - '"_*"\n' - ' Not imported by "from module import *".\n' - '\n' - '"_"\n' - ' In a "case" pattern within a "match" statement, "_" is a ' - 'soft\n' - ' keyword that denotes a wildcard.\n' - '\n' - ' Separately, the interactive interpreter makes the result of ' - 'the\n' - ' last evaluation available in the variable "_". (It is ' - 'stored in the\n' - ' "builtins" module, alongside built-in functions like ' - '"print".)\n' - '\n' - ' Elsewhere, "_" is a regular identifier. It is often used to ' - 'name\n' - ' “special†items, but it is not special to Python itself.\n' - '\n' - ' Note:\n' - '\n' - ' The name "_" is often used in conjunction with\n' - ' internationalization; refer to the documentation for the\n' - ' "gettext" module for more information on this ' - 'convention.It is\n' - ' also commonly used for unused variables.\n' - '\n' - '"__*__"\n' - ' System-defined names, informally known as “dunder†names. ' - 'These\n' - ' names are defined by the interpreter and its ' - 'implementation\n' - ' (including the standard library). Current system names are\n' - ' discussed in the Special method names section and ' - 'elsewhere. More\n' - ' will likely be defined in future versions of Python. *Any* ' - 'use of\n' - ' "__*__" names, in any context, that does not follow ' - 'explicitly\n' - ' documented use, is subject to breakage without warning.\n' - '\n' - '"__*"\n' - ' Class-private names. Names in this category, when used ' - 'within the\n' - ' context of a class definition, are re-written to use a ' - 'mangled form\n' - ' to help avoid name clashes between “private†attributes of ' - 'base and\n' - ' derived classes. See section Identifiers (Names).\n', - 'identifiers': 'Identifiers and keywords\n' - '************************\n' - '\n' - 'Identifiers (also referred to as *names*) are described by ' - 'the\n' - 'following lexical definitions.\n' - '\n' - 'The syntax of identifiers in Python is based on the Unicode ' - 'standard\n' - 'annex UAX-31, with elaboration and changes as defined below; ' - 'see also\n' - '**PEP 3131** for further details.\n' - '\n' - 'Within the ASCII range (U+0001..U+007F), the valid characters ' - 'for\n' - 'identifiers include the uppercase and lowercase letters "A" ' - 'through\n' - '"Z", the underscore "_" and, except for the first character, ' - 'the\n' - 'digits "0" through "9". Python 3.0 introduced additional ' - 'characters\n' - 'from outside the ASCII range (see **PEP 3131**). For these\n' - 'characters, the classification uses the version of the ' - 'Unicode\n' - 'Character Database as included in the "unicodedata" module.\n' - '\n' - 'Identifiers are unlimited in length. 
Case is significant.\n' - '\n' - ' identifier ::= xid_start xid_continue*\n' - ' id_start ::= \n' - ' id_continue ::= \n' - ' xid_start ::= \n' - ' xid_continue ::= \n' - '\n' - 'The Unicode category codes mentioned above stand for:\n' - '\n' - '* *Lu* - uppercase letters\n' - '\n' - '* *Ll* - lowercase letters\n' - '\n' - '* *Lt* - titlecase letters\n' - '\n' - '* *Lm* - modifier letters\n' - '\n' - '* *Lo* - other letters\n' - '\n' - '* *Nl* - letter numbers\n' - '\n' - '* *Mn* - nonspacing marks\n' - '\n' - '* *Mc* - spacing combining marks\n' - '\n' - '* *Nd* - decimal numbers\n' - '\n' - '* *Pc* - connector punctuations\n' - '\n' - '* *Other_ID_Start* - explicit list of characters in ' - 'PropList.txt to\n' - ' support backwards compatibility\n' - '\n' - '* *Other_ID_Continue* - likewise\n' - '\n' - 'All identifiers are converted into the normal form NFKC while ' - 'parsing;\n' - 'comparison of identifiers is based on NFKC.\n' - '\n' - 'A non-normative HTML file listing all valid identifier ' - 'characters for\n' - 'Unicode 16.0.0 can be found at\n' - 'https://www.unicode.org/Public/16.0.0/ucd/DerivedCoreProperties.txt\n' - '\n' - '\n' - 'Keywords\n' - '========\n' - '\n' - 'The following identifiers are used as reserved words, or ' - '*keywords* of\n' - 'the language, and cannot be used as ordinary identifiers. ' - 'They must\n' - 'be spelled exactly as written here:\n' - '\n' - ' False await else import pass\n' - ' None break except in raise\n' - ' True class finally is return\n' - ' and continue for lambda try\n' - ' as def from nonlocal while\n' - ' assert del global not with\n' - ' async elif if or yield\n' - '\n' - '\n' - 'Soft Keywords\n' - '=============\n' - '\n' - 'Added in version 3.10.\n' - '\n' - 'Some identifiers are only reserved under specific contexts. ' - 'These are\n' - 'known as *soft keywords*. The identifiers "match", "case", ' - '"type" and\n' - '"_" can syntactically act as keywords in certain contexts, ' - 'but this\n' - 'distinction is done at the parser level, not when ' - 'tokenizing.\n' - '\n' - 'As soft keywords, their use in the grammar is possible while ' - 'still\n' - 'preserving compatibility with existing code that uses these ' - 'names as\n' - 'identifier names.\n' - '\n' - '"match", "case", and "_" are used in the "match" statement. ' - '"type" is\n' - 'used in the "type" statement.\n' - '\n' - 'Changed in version 3.12: "type" is now a soft keyword.\n' - '\n' - '\n' - 'Reserved classes of identifiers\n' - '===============================\n' - '\n' - 'Certain classes of identifiers (besides keywords) have ' - 'special\n' - 'meanings. These classes are identified by the patterns of ' - 'leading and\n' - 'trailing underscore characters:\n' - '\n' - '"_*"\n' - ' Not imported by "from module import *".\n' - '\n' - '"_"\n' - ' In a "case" pattern within a "match" statement, "_" is a ' - 'soft\n' - ' keyword that denotes a wildcard.\n' - '\n' - ' Separately, the interactive interpreter makes the result ' - 'of the\n' - ' last evaluation available in the variable "_". (It is ' - 'stored in the\n' - ' "builtins" module, alongside built-in functions like ' - '"print".)\n' - '\n' - ' Elsewhere, "_" is a regular identifier. 
It is often used ' - 'to name\n' - ' “special†items, but it is not special to Python itself.\n' - '\n' - ' Note:\n' - '\n' - ' The name "_" is often used in conjunction with\n' - ' internationalization; refer to the documentation for ' - 'the\n' - ' "gettext" module for more information on this ' - 'convention.It is\n' - ' also commonly used for unused variables.\n' - '\n' - '"__*__"\n' - ' System-defined names, informally known as “dunder†names. ' - 'These\n' - ' names are defined by the interpreter and its ' - 'implementation\n' - ' (including the standard library). Current system names ' - 'are\n' - ' discussed in the Special method names section and ' - 'elsewhere. More\n' - ' will likely be defined in future versions of Python. ' - '*Any* use of\n' - ' "__*__" names, in any context, that does not follow ' - 'explicitly\n' - ' documented use, is subject to breakage without warning.\n' - '\n' - '"__*"\n' - ' Class-private names. Names in this category, when used ' - 'within the\n' - ' context of a class definition, are re-written to use a ' - 'mangled form\n' - ' to help avoid name clashes between “private†attributes of ' - 'base and\n' - ' derived classes. See section Identifiers (Names).\n', - 'if': 'The "if" statement\n' - '******************\n' - '\n' - 'The "if" statement is used for conditional execution:\n' - '\n' - ' if_stmt ::= "if" assignment_expression ":" suite\n' - ' ("elif" assignment_expression ":" suite)*\n' - ' ["else" ":" suite]\n' - '\n' - 'It selects exactly one of the suites by evaluating the expressions ' - 'one\n' - 'by one until one is found to be true (see section Boolean operations\n' - 'for the definition of true and false); then that suite is executed\n' - '(and no other part of the "if" statement is executed or evaluated).\n' - 'If all expressions are false, the suite of the "else" clause, if\n' - 'present, is executed.\n', - 'imaginary': 'Imaginary literals\n' - '******************\n' - '\n' - 'Imaginary literals are described by the following lexical ' - 'definitions:\n' - '\n' - ' imagnumber ::= (floatnumber | digitpart) ("j" | "J")\n' - '\n' - 'An imaginary literal yields a complex number with a real part ' - 'of 0.0.\n' - 'Complex numbers are represented as a pair of floating-point ' - 'numbers\n' - 'and have the same restrictions on their range. To create a ' - 'complex\n' - 'number with a nonzero real part, add a floating-point number to ' - 'it,\n' - 'e.g., "(3+4j)". Some examples of imaginary literals:\n' - '\n' - ' 3.14j 10.j 10j .001j 1e100j 3.14e-10j ' - '3.14_15_93j\n', - 'import': 'The "import" statement\n' - '**********************\n' - '\n' - ' import_stmt ::= "import" module ["as" identifier] ("," ' - 'module ["as" identifier])*\n' - ' | "from" relative_module "import" identifier ' - '["as" identifier]\n' - ' ("," identifier ["as" identifier])*\n' - ' | "from" relative_module "import" "(" ' - 'identifier ["as" identifier]\n' - ' ("," identifier ["as" identifier])* [","] ")"\n' - ' | "from" relative_module "import" "*"\n' - ' module ::= (identifier ".")* identifier\n' - ' relative_module ::= "."* module | "."+\n' - '\n' - 'The basic import statement (no "from" clause) is executed in two\n' - 'steps:\n' - '\n' - '1. find a module, loading and initializing it if necessary\n' - '\n' - '2. 
define a name or names in the local namespace for the scope ' - 'where\n' - ' the "import" statement occurs.\n' - '\n' - 'When the statement contains multiple clauses (separated by commas) ' - 'the\n' - 'two steps are carried out separately for each clause, just as ' - 'though\n' - 'the clauses had been separated out into individual import ' - 'statements.\n' - '\n' - 'The details of the first step, finding and loading modules, are\n' - 'described in greater detail in the section on the import system, ' - 'which\n' - 'also describes the various types of packages and modules that can ' - 'be\n' - 'imported, as well as all the hooks that can be used to customize ' - 'the\n' - 'import system. Note that failures in this step may indicate ' - 'either\n' - 'that the module could not be located, *or* that an error occurred\n' - 'while initializing the module, which includes execution of the\n' - 'module’s code.\n' - '\n' - 'If the requested module is retrieved successfully, it will be ' - 'made\n' - 'available in the local namespace in one of three ways:\n' - '\n' - '* If the module name is followed by "as", then the name following ' - '"as"\n' - ' is bound directly to the imported module.\n' - '\n' - '* If no other name is specified, and the module being imported is ' - 'a\n' - ' top level module, the module’s name is bound in the local ' - 'namespace\n' - ' as a reference to the imported module\n' - '\n' - '* If the module being imported is *not* a top level module, then ' - 'the\n' - ' name of the top level package that contains the module is bound ' - 'in\n' - ' the local namespace as a reference to the top level package. ' - 'The\n' - ' imported module must be accessed using its full qualified name\n' - ' rather than directly\n' - '\n' - 'The "from" form uses a slightly more complex process:\n' - '\n' - '1. find the module specified in the "from" clause, loading and\n' - ' initializing it if necessary;\n' - '\n' - '2. for each of the identifiers specified in the "import" clauses:\n' - '\n' - ' 1. check if the imported module has an attribute by that name\n' - '\n' - ' 2. if not, attempt to import a submodule with that name and ' - 'then\n' - ' check the imported module again for that attribute\n' - '\n' - ' 3. if the attribute is not found, "ImportError" is raised.\n' - '\n' - ' 4. otherwise, a reference to that value is stored in the local\n' - ' namespace, using the name in the "as" clause if it is ' - 'present,\n' - ' otherwise using the attribute name\n' - '\n' - 'Examples:\n' - '\n' - ' import foo # foo imported and bound locally\n' - ' import foo.bar.baz # foo, foo.bar, and foo.bar.baz ' - 'imported, foo bound locally\n' - ' import foo.bar.baz as fbb # foo, foo.bar, and foo.bar.baz ' - 'imported, foo.bar.baz bound as fbb\n' - ' from foo.bar import baz # foo, foo.bar, and foo.bar.baz ' - 'imported, foo.bar.baz bound as baz\n' - ' from foo import attr # foo imported and foo.attr bound as ' - 'attr\n' - '\n' - 'If the list of identifiers is replaced by a star ("\'*\'"), all ' - 'public\n' - 'names defined in the module are bound in the local namespace for ' - 'the\n' - 'scope where the "import" statement occurs.\n' - '\n' - 'The *public names* defined by a module are determined by checking ' - 'the\n' - 'module’s namespace for a variable named "__all__"; if defined, it ' - 'must\n' - 'be a sequence of strings which are names defined or imported by ' - 'that\n' - 'module. The names given in "__all__" are all considered public ' - 'and\n' - 'are required to exist. 
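As a sketch (the module and its names are hypothetical):

   # shapes.py
   __all__ = ['Circle', 'Square']   # the names bound by "from shapes import *"

   class Circle: ...
   class Square: ...

   def _helper(): ...               # not listed in __all__, so not exported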
If "__all__" is not defined, the set of ' - 'public\n' - 'names includes all names found in the module’s namespace which do ' - 'not\n' - 'begin with an underscore character ("\'_\'"). "__all__" should ' - 'contain\n' - 'the entire public API. It is intended to avoid accidentally ' - 'exporting\n' - 'items that are not part of the API (such as library modules which ' - 'were\n' - 'imported and used within the module).\n' - '\n' - 'The wild card form of import — "from module import *" — is only\n' - 'allowed at the module level. Attempting to use it in class or\n' - 'function definitions will raise a "SyntaxError".\n' - '\n' - 'When specifying what module to import you do not have to specify ' - 'the\n' - 'absolute name of the module. When a module or package is ' - 'contained\n' - 'within another package it is possible to make a relative import ' - 'within\n' - 'the same top package without having to mention the package name. ' - 'By\n' - 'using leading dots in the specified module or package after "from" ' - 'you\n' - 'can specify how high to traverse up the current package hierarchy\n' - 'without specifying exact names. One leading dot means the current\n' - 'package where the module making the import exists. Two dots means ' - 'up\n' - 'one package level. Three dots is up two levels, etc. So if you ' - 'execute\n' - '"from . import mod" from a module in the "pkg" package then you ' - 'will\n' - 'end up importing "pkg.mod". If you execute "from ..subpkg2 import ' - 'mod"\n' - 'from within "pkg.subpkg1" you will import "pkg.subpkg2.mod". The\n' - 'specification for relative imports is contained in the Package\n' - 'Relative Imports section.\n' - '\n' - '"importlib.import_module()" is provided to support applications ' - 'that\n' - 'determine dynamically the modules to be loaded.\n' - '\n' - 'Raises an auditing event "import" with arguments "module", ' - '"filename",\n' - '"sys.path", "sys.meta_path", "sys.path_hooks".\n' - '\n' - '\n' - 'Future statements\n' - '=================\n' - '\n' - 'A *future statement* is a directive to the compiler that a ' - 'particular\n' - 'module should be compiled using syntax or semantics that will be\n' - 'available in a specified future release of Python where the ' - 'feature\n' - 'becomes standard.\n' - '\n' - 'The future statement is intended to ease migration to future ' - 'versions\n' - 'of Python that introduce incompatible changes to the language. ' - 'It\n' - 'allows use of the new features on a per-module basis before the\n' - 'release in which the feature becomes standard.\n' - '\n' - ' future_stmt ::= "from" "__future__" "import" feature ["as" ' - 'identifier]\n' - ' ("," feature ["as" identifier])*\n' - ' | "from" "__future__" "import" "(" feature ' - '["as" identifier]\n' - ' ("," feature ["as" identifier])* [","] ")"\n' - ' feature ::= identifier\n' - '\n' - 'A future statement must appear near the top of the module. The ' - 'only\n' - 'lines that can appear before a future statement are:\n' - '\n' - '* the module docstring (if any),\n' - '\n' - '* comments,\n' - '\n' - '* blank lines, and\n' - '\n' - '* other future statements.\n' - '\n' - 'The only feature that requires using the future statement is\n' - '"annotations" (see **PEP 563**).\n' - '\n' - 'All historical features enabled by the future statement are still\n' - 'recognized by Python 3. The list includes "absolute_import",\n' - '"division", "generators", "generator_stop", "unicode_literals",\n' - '"print_function", "nested_scopes" and "with_statement". 
They are ' - 'all\n' - 'redundant because they are always enabled, and only kept for ' - 'backwards\n' - 'compatibility.\n' - '\n' - 'A future statement is recognized and treated specially at compile\n' - 'time: Changes to the semantics of core constructs are often\n' - 'implemented by generating different code. It may even be the ' - 'case\n' - 'that a new feature introduces new incompatible syntax (such as a ' - 'new\n' - 'reserved word), in which case the compiler may need to parse the\n' - 'module differently. Such decisions cannot be pushed off until\n' - 'runtime.\n' - '\n' - 'For any given release, the compiler knows which feature names ' - 'have\n' - 'been defined, and raises a compile-time error if a future ' - 'statement\n' - 'contains a feature not known to it.\n' - '\n' - 'The direct runtime semantics are the same as for any import ' - 'statement:\n' - 'there is a standard module "__future__", described later, and it ' - 'will\n' - 'be imported in the usual way at the time the future statement is\n' - 'executed.\n' - '\n' - 'The interesting runtime semantics depend on the specific feature\n' - 'enabled by the future statement.\n' - '\n' - 'Note that there is nothing special about the statement:\n' - '\n' - ' import __future__ [as name]\n' - '\n' - 'That is not a future statement; it’s an ordinary import statement ' - 'with\n' - 'no special semantics or syntax restrictions.\n' - '\n' - 'Code compiled by calls to the built-in functions "exec()" and\n' - '"compile()" that occur in a module "M" containing a future ' - 'statement\n' - 'will, by default, use the new syntax or semantics associated with ' - 'the\n' - 'future statement. This can be controlled by optional arguments ' - 'to\n' - '"compile()" — see the documentation of that function for details.\n' - '\n' - 'A future statement typed at an interactive interpreter prompt ' - 'will\n' - 'take effect for the rest of the interpreter session. If an\n' - 'interpreter is started with the "-i" option, is passed a script ' - 'name\n' - 'to execute, and the script includes a future statement, it will be ' - 'in\n' - 'effect in the interactive session started after the script is\n' - 'executed.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 236** - Back to the __future__\n' - ' The original proposal for the __future__ mechanism.\n', - 'in': 'Membership test operations\n' - '**************************\n' - '\n' - 'The operators "in" and "not in" test for membership. "x in s"\n' - 'evaluates to "True" if *x* is a member of *s*, and "False" otherwise.\n' - '"x not in s" returns the negation of "x in s". All built-in ' - 'sequences\n' - 'and set types support this as well as dictionary, for which "in" ' - 'tests\n' - 'whether the dictionary has a given key. For container types such as\n' - 'list, tuple, set, frozenset, dict, or collections.deque, the\n' - 'expression "x in y" is equivalent to "any(x is e or x == e for e in\n' - 'y)".\n' - '\n' - 'For the string and bytes types, "x in y" is "True" if and only if *x*\n' - 'is a substring of *y*. 
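A few quick illustrations of the cases described so far:

   >>> 3 in [1, 2, 3]
   True
   >>> 'pi' in {'pi': 3.14159}      # dictionaries test their keys
   True
   >>> 'bc' in 'abcd'
   True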
An equivalent test is "y.find(x) != -1".\n' - 'Empty strings are always considered to be a substring of any other\n' - 'string, so """ in "abc"" will return "True".\n' - '\n' - 'For user-defined classes which define the "__contains__()" method, "x\n' - 'in y" returns "True" if "y.__contains__(x)" returns a true value, and\n' - '"False" otherwise.\n' - '\n' - 'For user-defined classes which do not define "__contains__()" but do\n' - 'define "__iter__()", "x in y" is "True" if some value "z", for which\n' - 'the expression "x is z or x == z" is true, is produced while ' - 'iterating\n' - 'over "y". If an exception is raised during the iteration, it is as if\n' - '"in" raised that exception.\n' - '\n' - 'Lastly, the old-style iteration protocol is tried: if a class defines\n' - '"__getitem__()", "x in y" is "True" if and only if there is a non-\n' - 'negative integer index *i* such that "x is y[i] or x == y[i]", and no\n' - 'lower integer index raises the "IndexError" exception. (If any other\n' - 'exception is raised, it is as if "in" raised that exception).\n' - '\n' - 'The operator "not in" is defined to have the inverse truth value of\n' - '"in".\n', - 'integers': 'Integer literals\n' - '****************\n' - '\n' - 'Integer literals are described by the following lexical ' - 'definitions:\n' - '\n' - ' integer ::= decinteger | bininteger | octinteger | ' - 'hexinteger\n' - ' decinteger ::= nonzerodigit (["_"] digit)* | "0"+ (["_"] ' - '"0")*\n' - ' bininteger ::= "0" ("b" | "B") (["_"] bindigit)+\n' - ' octinteger ::= "0" ("o" | "O") (["_"] octdigit)+\n' - ' hexinteger ::= "0" ("x" | "X") (["_"] hexdigit)+\n' - ' nonzerodigit ::= "1"..."9"\n' - ' digit ::= "0"..."9"\n' - ' bindigit ::= "0" | "1"\n' - ' octdigit ::= "0"..."7"\n' - ' hexdigit ::= digit | "a"..."f" | "A"..."F"\n' - '\n' - 'There is no limit for the length of integer literals apart from ' - 'what\n' - 'can be stored in available memory.\n' - '\n' - 'Underscores are ignored for determining the numeric value of ' - 'the\n' - 'literal. They can be used to group digits for enhanced ' - 'readability.\n' - 'One underscore can occur between digits, and after base ' - 'specifiers\n' - 'like "0x".\n' - '\n' - 'Note that leading zeros in a non-zero decimal number are not ' - 'allowed.\n' - 'This is for disambiguation with C-style octal literals, which ' - 'Python\n' - 'used before version 3.0.\n' - '\n' - 'Some examples of integer literals:\n' - '\n' - ' 7 2147483647 0o177 0b100110111\n' - ' 3 79228162514264337593543950336 0o377 0xdeadbeef\n' - ' 100_000_000_000 0b_1110_0101\n' - '\n' - 'Changed in version 3.6: Underscores are now allowed for ' - 'grouping\n' - 'purposes in literals.\n', - 'lambda': 'Lambdas\n' - '*******\n' - '\n' - ' lambda_expr ::= "lambda" [parameter_list] ":" expression\n' - '\n' - 'Lambda expressions (sometimes called lambda forms) are used to ' - 'create\n' - 'anonymous functions. The expression "lambda parameters: ' - 'expression"\n' - 'yields a function object. 
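For instance:

   >>> add = lambda a, b: a + b
   >>> add(2, 3)
   5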
The unnamed object behaves like a ' - 'function\n' - 'object defined with:\n' - '\n' - ' def (parameters):\n' - ' return expression\n' - '\n' - 'See section Function definitions for the syntax of parameter ' - 'lists.\n' - 'Note that functions created with lambda expressions cannot ' - 'contain\n' - 'statements or annotations.\n', - 'lists': 'List displays\n' - '*************\n' - '\n' - 'A list display is a possibly empty series of expressions enclosed ' - 'in\n' - 'square brackets:\n' - '\n' - ' list_display ::= "[" [flexible_expression_list | comprehension] ' - '"]"\n' - '\n' - 'A list display yields a new list object, the contents being ' - 'specified\n' - 'by either a list of expressions or a comprehension. When a comma-\n' - 'separated list of expressions is supplied, its elements are ' - 'evaluated\n' - 'from left to right and placed into the list object in that order.\n' - 'When a comprehension is supplied, the list is constructed from the\n' - 'elements resulting from the comprehension.\n', - 'naming': 'Naming and binding\n' - '******************\n' - '\n' - '\n' - 'Binding of names\n' - '================\n' - '\n' - '*Names* refer to objects. Names are introduced by name binding\n' - 'operations.\n' - '\n' - 'The following constructs bind names:\n' - '\n' - '* formal parameters to functions,\n' - '\n' - '* class definitions,\n' - '\n' - '* function definitions,\n' - '\n' - '* assignment expressions,\n' - '\n' - '* targets that are identifiers if occurring in an assignment:\n' - '\n' - ' * "for" loop header,\n' - '\n' - ' * after "as" in a "with" statement, "except" clause, "except*"\n' - ' clause, or in the as-pattern in structural pattern matching,\n' - '\n' - ' * in a capture pattern in structural pattern matching\n' - '\n' - '* "import" statements.\n' - '\n' - '* "type" statements.\n' - '\n' - '* type parameter lists.\n' - '\n' - 'The "import" statement of the form "from ... import *" binds all ' - 'names\n' - 'defined in the imported module, except those beginning with an\n' - 'underscore. This form may only be used at the module level.\n' - '\n' - 'A target occurring in a "del" statement is also considered bound ' - 'for\n' - 'this purpose (though the actual semantics are to unbind the ' - 'name).\n' - '\n' - 'Each assignment or import statement occurs within a block defined ' - 'by a\n' - 'class or function definition or at the module level (the ' - 'top-level\n' - 'code block).\n' - '\n' - 'If a name is bound in a block, it is a local variable of that ' - 'block,\n' - 'unless declared as "nonlocal" or "global". If a name is bound at ' - 'the\n' - 'module level, it is a global variable. (The variables of the ' - 'module\n' - 'code block are local and global.) If a variable is used in a ' - 'code\n' - 'block but not defined there, it is a *free variable*.\n' - '\n' - 'Each occurrence of a name in the program text refers to the ' - '*binding*\n' - 'of that name established by the following name resolution rules.\n' - '\n' - '\n' - 'Resolution of names\n' - '===================\n' - '\n' - 'A *scope* defines the visibility of a name within a block. If a ' - 'local\n' - 'variable is defined in a block, its scope includes that block. If ' - 'the\n' - 'definition occurs in a function block, the scope extends to any ' - 'blocks\n' - 'contained within the defining one, unless a contained block ' - 'introduces\n' - 'a different binding for the name.\n' - '\n' - 'When a name is used in a code block, it is resolved using the ' - 'nearest\n' - 'enclosing scope. 
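For example (a minimal sketch):

   >>> def outer():
   ...     x = 'enclosing'
   ...     def inner():
   ...         return x         # free variable, found in the nearest enclosing scope
   ...     return inner()
   ...
   >>> outer()
   'enclosing'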
The set of all such scopes visible to a code ' - 'block\n' - 'is called the block’s *environment*.\n' - '\n' - 'When a name is not found at all, a "NameError" exception is ' - 'raised. If\n' - 'the current scope is a function scope, and the name refers to a ' - 'local\n' - 'variable that has not yet been bound to a value at the point where ' - 'the\n' - 'name is used, an "UnboundLocalError" exception is raised.\n' - '"UnboundLocalError" is a subclass of "NameError".\n' - '\n' - 'If a name binding operation occurs anywhere within a code block, ' - 'all\n' - 'uses of the name within the block are treated as references to ' - 'the\n' - 'current block. This can lead to errors when a name is used within ' - 'a\n' - 'block before it is bound. This rule is subtle. Python lacks\n' - 'declarations and allows name binding operations to occur anywhere\n' - 'within a code block. The local variables of a code block can be\n' - 'determined by scanning the entire text of the block for name ' - 'binding\n' - 'operations. See the FAQ entry on UnboundLocalError for examples.\n' - '\n' - 'If the "global" statement occurs within a block, all uses of the ' - 'names\n' - 'specified in the statement refer to the bindings of those names in ' - 'the\n' - 'top-level namespace. Names are resolved in the top-level ' - 'namespace by\n' - 'searching the global namespace, i.e. the namespace of the module\n' - 'containing the code block, and the builtins namespace, the ' - 'namespace\n' - 'of the module "builtins". The global namespace is searched ' - 'first. If\n' - 'the names are not found there, the builtins namespace is searched\n' - 'next. If the names are also not found in the builtins namespace, ' - 'new\n' - 'variables are created in the global namespace. The global ' - 'statement\n' - 'must precede all uses of the listed names.\n' - '\n' - 'The "global" statement has the same scope as a name binding ' - 'operation\n' - 'in the same block. If the nearest enclosing scope for a free ' - 'variable\n' - 'contains a global statement, the free variable is treated as a ' - 'global.\n' - '\n' - 'The "nonlocal" statement causes corresponding names to refer to\n' - 'previously bound variables in the nearest enclosing function ' - 'scope.\n' - '"SyntaxError" is raised at compile time if the given name does ' - 'not\n' - 'exist in any enclosing function scope. Type parameters cannot be\n' - 'rebound with the "nonlocal" statement.\n' - '\n' - 'The namespace for a module is automatically created the first time ' - 'a\n' - 'module is imported. The main module for a script is always ' - 'called\n' - '"__main__".\n' - '\n' - 'Class definition blocks and arguments to "exec()" and "eval()" ' - 'are\n' - 'special in the context of name resolution. A class definition is ' - 'an\n' - 'executable statement that may use and define names. These ' - 'references\n' - 'follow the normal rules for name resolution with an exception ' - 'that\n' - 'unbound local variables are looked up in the global namespace. ' - 'The\n' - 'namespace of the class definition becomes the attribute dictionary ' - 'of\n' - 'the class. The scope of names defined in a class block is limited ' - 'to\n' - 'the class block; it does not extend to the code blocks of ' - 'methods.\n' - 'This includes comprehensions and generator expressions, but it ' - 'does\n' - 'not include annotation scopes, which have access to their ' - 'enclosing\n' - 'class scopes. 
This means that the following will fail:\n' - '\n' - ' class A:\n' - ' a = 42\n' - ' b = list(a + i for i in range(10))\n' - '\n' - 'However, the following will succeed:\n' - '\n' - ' class A:\n' - ' type Alias = Nested\n' - ' class Nested: pass\n' - '\n' - " print(A.Alias.__value__) # \n" - '\n' - '\n' - 'Annotation scopes\n' - '=================\n' - '\n' - '*Annotations*, type parameter lists and "type" statements ' - 'introduce\n' - '*annotation scopes*, which behave mostly like function scopes, ' - 'but\n' - 'with some exceptions discussed below.\n' - '\n' - 'Annotation scopes are used in the following contexts:\n' - '\n' - '* *Function annotations*.\n' - '\n' - '* *Variable annotations*.\n' - '\n' - '* Type parameter lists for generic type aliases.\n' - '\n' - '* Type parameter lists for generic functions. A generic ' - 'function’s\n' - ' annotations are executed within the annotation scope, but its\n' - ' defaults and decorators are not.\n' - '\n' - '* Type parameter lists for generic classes. A generic class’s ' - 'base\n' - ' classes and keyword arguments are executed within the ' - 'annotation\n' - ' scope, but its decorators are not.\n' - '\n' - '* The bounds, constraints, and default values for type parameters\n' - ' (lazily evaluated).\n' - '\n' - '* The value of type aliases (lazily evaluated).\n' - '\n' - 'Annotation scopes differ from function scopes in the following ' - 'ways:\n' - '\n' - '* Annotation scopes have access to their enclosing class ' - 'namespace. If\n' - ' an annotation scope is immediately within a class scope, or ' - 'within\n' - ' another annotation scope that is immediately within a class ' - 'scope,\n' - ' the code in the annotation scope can use names defined in the ' - 'class\n' - ' scope as if it were executed directly within the class body. ' - 'This\n' - ' contrasts with regular functions defined within classes, which\n' - ' cannot access names defined in the class scope.\n' - '\n' - '* Expressions in annotation scopes cannot contain "yield", "yield\n' - ' from", "await", or ":=" expressions. (These expressions are ' - 'allowed\n' - ' in other scopes contained within the annotation scope.)\n' - '\n' - '* Names defined in annotation scopes cannot be rebound with ' - '"nonlocal"\n' - ' statements in inner scopes. This includes only type parameters, ' - 'as\n' - ' no other syntactic elements that can appear within annotation ' - 'scopes\n' - ' can introduce new names.\n' - '\n' - '* While annotation scopes have an internal name, that name is not\n' - ' reflected in the *qualified name* of objects defined within the\n' - ' scope. Instead, the "__qualname__" of such objects is as if the\n' - ' object were defined in the enclosing scope.\n' - '\n' - 'Added in version 3.12: Annotation scopes were introduced in ' - 'Python\n' - '3.12 as part of **PEP 695**.\n' - '\n' - 'Changed in version 3.13: Annotation scopes are also used for type\n' - 'parameter defaults, as introduced by **PEP 696**.\n' - '\n' - 'Changed in version 3.14: Annotation scopes are now also used for\n' - 'annotations, as specified in **PEP 649** and **PEP 749**.\n' - '\n' - '\n' - 'Lazy evaluation\n' - '===============\n' - '\n' - 'Most annotation scopes are *lazily evaluated*. This includes\n' - 'annotations, the values of type aliases created through the ' - '"type"\n' - 'statement, and the bounds, constraints, and default values of ' - 'type\n' - 'variables created through the type parameter syntax. 
This means ' - 'that\n' - 'they are not evaluated when the type alias or type variable is\n' - 'created, or when the object carrying annotations is created. ' - 'Instead,\n' - 'they are only evaluated when necessary, for example when the\n' - '"__value__" attribute on a type alias is accessed.\n' - '\n' - 'Example:\n' - '\n' - ' >>> type Alias = 1/0\n' - ' >>> Alias.__value__\n' - ' Traceback (most recent call last):\n' - ' ...\n' - ' ZeroDivisionError: division by zero\n' - ' >>> def func[T: 1/0](): pass\n' - ' >>> T = func.__type_params__[0]\n' - ' >>> T.__bound__\n' - ' Traceback (most recent call last):\n' - ' ...\n' - ' ZeroDivisionError: division by zero\n' - '\n' - 'Here the exception is raised only when the "__value__" attribute ' - 'of\n' - 'the type alias or the "__bound__" attribute of the type variable ' - 'is\n' - 'accessed.\n' - '\n' - 'This behavior is primarily useful for references to types that ' - 'have\n' - 'not yet been defined when the type alias or type variable is ' - 'created.\n' - 'For example, lazy evaluation enables creation of mutually ' - 'recursive\n' - 'type aliases:\n' - '\n' - ' from typing import Literal\n' - '\n' - ' type SimpleExpr = int | Parenthesized\n' - ' type Parenthesized = tuple[Literal["("], Expr, Literal[")"]]\n' - ' type Expr = SimpleExpr | tuple[SimpleExpr, Literal["+", "-"], ' - 'Expr]\n' - '\n' - 'Lazily evaluated values are evaluated in annotation scope, which ' - 'means\n' - 'that names that appear inside the lazily evaluated value are ' - 'looked up\n' - 'as if they were used in the immediately enclosing scope.\n' - '\n' - 'Added in version 3.12.\n' - '\n' - '\n' - 'Builtins and restricted execution\n' - '=================================\n' - '\n' - '**CPython implementation detail:** Users should not touch\n' - '"__builtins__"; it is strictly an implementation detail. Users\n' - 'wanting to override values in the builtins namespace should ' - '"import"\n' - 'the "builtins" module and modify its attributes appropriately.\n' - '\n' - 'The builtins namespace associated with the execution of a code ' - 'block\n' - 'is actually found by looking up the name "__builtins__" in its ' - 'global\n' - 'namespace; this should be a dictionary or a module (in the latter ' - 'case\n' - 'the module’s dictionary is used). By default, when in the ' - '"__main__"\n' - 'module, "__builtins__" is the built-in module "builtins"; when in ' - 'any\n' - 'other module, "__builtins__" is an alias for the dictionary of ' - 'the\n' - '"builtins" module itself.\n' - '\n' - '\n' - 'Interaction with dynamic features\n' - '=================================\n' - '\n' - 'Name resolution of free variables occurs at runtime, not at ' - 'compile\n' - 'time. This means that the following code will print 42:\n' - '\n' - ' i = 10\n' - ' def f():\n' - ' print(i)\n' - ' i = 42\n' - ' f()\n' - '\n' - 'The "eval()" and "exec()" functions do not have access to the ' - 'full\n' - 'environment for resolving names. Names may be resolved in the ' - 'local\n' - 'and global namespaces of the caller. Free variables are not ' - 'resolved\n' - 'in the nearest enclosing namespace, but in the global namespace. ' - '[1]\n' - 'The "exec()" and "eval()" functions have optional arguments to\n' - 'override the global and local namespace. 
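As an aside on the earlier recommendation to modify the "builtins" module rather than "__builtins__", a minimal sketch; the replacement function is invented for the example:

    import builtins

    _original_print = builtins.print

    def tagged_print(*args, **kwargs):
        _original_print("[log]", *args, **kwargs)

    builtins.print = tagged_print      # visible to every module in the process
    print("hello")                     # [log] hello
    builtins.print = _original_print   # restore the original binding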
If only one namespace ' - 'is\n' - 'specified, it is used for both.\n', - 'nonlocal': 'The "nonlocal" statement\n' - '************************\n' - '\n' - ' nonlocal_stmt ::= "nonlocal" identifier ("," identifier)*\n' - '\n' - 'When the definition of a function or class is nested (enclosed) ' - 'within\n' - 'the definitions of other functions, its nonlocal scopes are the ' - 'local\n' - 'scopes of the enclosing functions. The "nonlocal" statement ' - 'causes the\n' - 'listed identifiers to refer to names previously bound in ' - 'nonlocal\n' - 'scopes. It allows encapsulated code to rebind such nonlocal\n' - 'identifiers. If a name is bound in more than one nonlocal ' - 'scope, the\n' - 'nearest binding is used. If a name is not bound in any nonlocal ' - 'scope,\n' - 'or if there is no nonlocal scope, a "SyntaxError" is raised.\n' - '\n' - 'The "nonlocal" statement applies to the entire scope of a ' - 'function or\n' - 'class body. A "SyntaxError" is raised if a variable is used or\n' - 'assigned to prior to its nonlocal declaration in the scope.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 3104** - Access to Names in Outer Scopes\n' - ' The specification for the "nonlocal" statement.\n' - '\n' - '**Programmer’s note:** "nonlocal" is a directive to the parser ' - 'and\n' - 'applies only to code parsed along with it. See the note for ' - 'the\n' - '"global" statement.\n', - 'numbers': 'Numeric literals\n' - '****************\n' - '\n' - 'There are three types of numeric literals: integers, ' - 'floating-point\n' - 'numbers, and imaginary numbers. There are no complex literals\n' - '(complex numbers can be formed by adding a real number and an\n' - 'imaginary number).\n' - '\n' - 'Note that numeric literals do not include a sign; a phrase like ' - '"-1"\n' - 'is actually an expression composed of the unary operator ‘"-"’ ' - 'and the\n' - 'literal "1".\n', - 'numeric-types': 'Emulating numeric types\n' - '***********************\n' - '\n' - 'The following methods can be defined to emulate numeric ' - 'objects.\n' - 'Methods corresponding to operations that are not supported ' - 'by the\n' - 'particular kind of number implemented (e.g., bitwise ' - 'operations for\n' - 'non-integral numbers) should be left undefined.\n' - '\n' - 'object.__add__(self, other)\n' - 'object.__sub__(self, other)\n' - 'object.__mul__(self, other)\n' - 'object.__matmul__(self, other)\n' - 'object.__truediv__(self, other)\n' - 'object.__floordiv__(self, other)\n' - 'object.__mod__(self, other)\n' - 'object.__divmod__(self, other)\n' - 'object.__pow__(self, other[, modulo])\n' - 'object.__lshift__(self, other)\n' - 'object.__rshift__(self, other)\n' - 'object.__and__(self, other)\n' - 'object.__xor__(self, other)\n' - 'object.__or__(self, other)\n' - '\n' - ' These methods are called to implement the binary ' - 'arithmetic\n' - ' operations ("+", "-", "*", "@", "/", "//", "%", ' - '"divmod()",\n' - ' "pow()", "**", "<<", ">>", "&", "^", "|"). For ' - 'instance, to\n' - ' evaluate the expression "x + y", where *x* is an ' - 'instance of a\n' - ' class that has an "__add__()" method, ' - '"type(x).__add__(x, y)" is\n' - ' called. The "__divmod__()" method should be the ' - 'equivalent to\n' - ' using "__floordiv__()" and "__mod__()"; it should not be ' - 'related to\n' - ' "__truediv__()". 
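Returning to the "nonlocal" statement described above, a minimal closure sketch; the names are invented for the example:

    def make_counter():
        count = 0
        def bump():
            nonlocal count        # rebind 'count' in the nearest enclosing function scope
            count += 1
            return count
        return bump

    counter = make_counter()
    print(counter(), counter(), counter())    # 1 2 3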
Note that "__pow__()" should be ' - 'defined to accept\n' - ' an optional third argument if the ternary version of the ' - 'built-in\n' - ' "pow()" function is to be supported.\n' - '\n' - ' If one of those methods does not support the operation ' - 'with the\n' - ' supplied arguments, it should return "NotImplemented".\n' - '\n' - 'object.__radd__(self, other)\n' - 'object.__rsub__(self, other)\n' - 'object.__rmul__(self, other)\n' - 'object.__rmatmul__(self, other)\n' - 'object.__rtruediv__(self, other)\n' - 'object.__rfloordiv__(self, other)\n' - 'object.__rmod__(self, other)\n' - 'object.__rdivmod__(self, other)\n' - 'object.__rpow__(self, other[, modulo])\n' - 'object.__rlshift__(self, other)\n' - 'object.__rrshift__(self, other)\n' - 'object.__rand__(self, other)\n' - 'object.__rxor__(self, other)\n' - 'object.__ror__(self, other)\n' - '\n' - ' These methods are called to implement the binary ' - 'arithmetic\n' - ' operations ("+", "-", "*", "@", "/", "//", "%", ' - '"divmod()",\n' - ' "pow()", "**", "<<", ">>", "&", "^", "|") with reflected ' - '(swapped)\n' - ' operands. These functions are only called if the ' - 'operands are of\n' - ' different types, when the left operand does not support ' - 'the\n' - ' corresponding operation [3], or the right operand’s ' - 'class is\n' - ' derived from the left operand’s class. [4] For instance, ' - 'to\n' - ' evaluate the expression "x - y", where *y* is an ' - 'instance of a\n' - ' class that has an "__rsub__()" method, ' - '"type(y).__rsub__(y, x)" is\n' - ' called if "type(x).__sub__(x, y)" returns ' - '"NotImplemented" or\n' - ' "type(y)" is a subclass of "type(x)". [5]\n' - '\n' - ' Note that ternary "pow()" will not try calling ' - '"__rpow__()" (the\n' - ' coercion rules would become too complicated).\n' - '\n' - ' Note:\n' - '\n' - ' If the right operand’s type is a subclass of the left ' - 'operand’s\n' - ' type and that subclass provides a different ' - 'implementation of the\n' - ' reflected method for the operation, this method will ' - 'be called\n' - ' before the left operand’s non-reflected method. This ' - 'behavior\n' - ' allows subclasses to override their ancestors’ ' - 'operations.\n' - '\n' - 'object.__iadd__(self, other)\n' - 'object.__isub__(self, other)\n' - 'object.__imul__(self, other)\n' - 'object.__imatmul__(self, other)\n' - 'object.__itruediv__(self, other)\n' - 'object.__ifloordiv__(self, other)\n' - 'object.__imod__(self, other)\n' - 'object.__ipow__(self, other[, modulo])\n' - 'object.__ilshift__(self, other)\n' - 'object.__irshift__(self, other)\n' - 'object.__iand__(self, other)\n' - 'object.__ixor__(self, other)\n' - 'object.__ior__(self, other)\n' - '\n' - ' These methods are called to implement the augmented ' - 'arithmetic\n' - ' assignments ("+=", "-=", "*=", "@=", "/=", "//=", "%=", ' - '"**=",\n' - ' "<<=", ">>=", "&=", "^=", "|="). These methods should ' - 'attempt to\n' - ' do the operation in-place (modifying *self*) and return ' - 'the result\n' - ' (which could be, but does not have to be, *self*). If a ' - 'specific\n' - ' method is not defined, or if that method returns ' - '"NotImplemented",\n' - ' the augmented assignment falls back to the normal ' - 'methods. For\n' - ' instance, if *x* is an instance of a class with an ' - '"__iadd__()"\n' - ' method, "x += y" is equivalent to "x = x.__iadd__(y)" . 
' - 'If\n' - ' "__iadd__()" does not exist, or if "x.__iadd__(y)" ' - 'returns\n' - ' "NotImplemented", "x.__add__(y)" and "y.__radd__(x)" ' - 'are\n' - ' considered, as with the evaluation of "x + y". In ' - 'certain\n' - ' situations, augmented assignment can result in ' - 'unexpected errors\n' - ' (see Why does a_tuple[i] += [‘item’] raise an exception ' - 'when the\n' - ' addition works?), but this behavior is in fact part of ' - 'the data\n' - ' model.\n' - '\n' - 'object.__neg__(self)\n' - 'object.__pos__(self)\n' - 'object.__abs__(self)\n' - 'object.__invert__(self)\n' - '\n' - ' Called to implement the unary arithmetic operations ' - '("-", "+",\n' - ' "abs()" and "~").\n' - '\n' - 'object.__complex__(self)\n' - 'object.__int__(self)\n' - 'object.__float__(self)\n' - '\n' - ' Called to implement the built-in functions "complex()", ' - '"int()" and\n' - ' "float()". Should return a value of the appropriate ' - 'type.\n' - '\n' - 'object.__index__(self)\n' - '\n' - ' Called to implement "operator.index()", and whenever ' - 'Python needs\n' - ' to losslessly convert the numeric object to an integer ' - 'object (such\n' - ' as in slicing, or in the built-in "bin()", "hex()" and ' - '"oct()"\n' - ' functions). Presence of this method indicates that the ' - 'numeric\n' - ' object is an integer type. Must return an integer.\n' - '\n' - ' If "__int__()", "__float__()" and "__complex__()" are ' - 'not defined\n' - ' then corresponding built-in functions "int()", "float()" ' - 'and\n' - ' "complex()" fall back to "__index__()".\n' - '\n' - 'object.__round__(self[, ndigits])\n' - 'object.__trunc__(self)\n' - 'object.__floor__(self)\n' - 'object.__ceil__(self)\n' - '\n' - ' Called to implement the built-in function "round()" and ' - '"math"\n' - ' functions "trunc()", "floor()" and "ceil()". Unless ' - '*ndigits* is\n' - ' passed to "__round__()" all these methods should return ' - 'the value\n' - ' of the object truncated to an "Integral" (typically an ' - '"int").\n' - '\n' - ' Changed in version 3.14: "int()" no longer delegates to ' - 'the\n' - ' "__trunc__()" method.\n', - 'objects': 'Objects, values and types\n' - '*************************\n' - '\n' - '*Objects* are Python’s abstraction for data. All data in a ' - 'Python\n' - 'program is represented by objects or by relations between ' - 'objects. (In\n' - 'a sense, and in conformance to Von Neumann’s model of a “stored\n' - 'program computerâ€, code is also represented by objects.)\n' - '\n' - 'Every object has an identity, a type and a value. An object’s\n' - '*identity* never changes once it has been created; you may think ' - 'of it\n' - 'as the object’s address in memory. The "is" operator compares ' - 'the\n' - 'identity of two objects; the "id()" function returns an integer\n' - 'representing its identity.\n' - '\n' - '**CPython implementation detail:** For CPython, "id(x)" is the ' - 'memory\n' - 'address where "x" is stored.\n' - '\n' - 'An object’s type determines the operations that the object ' - 'supports\n' - '(e.g., “does it have a length?â€) and also defines the possible ' - 'values\n' - 'for objects of that type. The "type()" function returns an ' - 'object’s\n' - 'type (which is an object itself). Like its identity, an ' - 'object’s\n' - '*type* is also unchangeable. [1]\n' - '\n' - 'The *value* of some objects can change. Objects whose value can\n' - 'change are said to be *mutable*; objects whose value is ' - 'unchangeable\n' - 'once they are created are called *immutable*. 
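The binary, reflected and in-place methods listed above can be sketched with a toy type; the class name and behaviour are invented for the example and this is not a complete numeric implementation:

    class Metres:
        def __init__(self, value):
            self.value = value

        def __add__(self, other):
            if isinstance(other, Metres):
                return Metres(self.value + other.value)
            return NotImplemented        # let Python try the other operand's method

        __radd__ = __add__               # addition is symmetric for this toy type

        def __repr__(self):
            return f"Metres({self.value})"

    print(Metres(2) + Metres(3))         # Metres(5)
    print(Metres(2).__add__("3"))        # NotImplemented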
(The value of an\n' - 'immutable container object that contains a reference to a ' - 'mutable\n' - 'object can change when the latter’s value is changed; however ' - 'the\n' - 'container is still considered immutable, because the collection ' - 'of\n' - 'objects it contains cannot be changed. So, immutability is not\n' - 'strictly the same as having an unchangeable value, it is more ' - 'subtle.)\n' - 'An object’s mutability is determined by its type; for instance,\n' - 'numbers, strings and tuples are immutable, while dictionaries ' - 'and\n' - 'lists are mutable.\n' - '\n' - 'Objects are never explicitly destroyed; however, when they ' - 'become\n' - 'unreachable they may be garbage-collected. An implementation is\n' - 'allowed to postpone garbage collection or omit it altogether — it ' - 'is a\n' - 'matter of implementation quality how garbage collection is\n' - 'implemented, as long as no objects are collected that are still\n' - 'reachable.\n' - '\n' - '**CPython implementation detail:** CPython currently uses a ' - 'reference-\n' - 'counting scheme with (optional) delayed detection of cyclically ' - 'linked\n' - 'garbage, which collects most objects as soon as they become\n' - 'unreachable, but is not guaranteed to collect garbage containing\n' - 'circular references. See the documentation of the "gc" module ' - 'for\n' - 'information on controlling the collection of cyclic garbage. ' - 'Other\n' - 'implementations act differently and CPython may change. Do not ' - 'depend\n' - 'on immediate finalization of objects when they become unreachable ' - '(so\n' - 'you should always close files explicitly).\n' - '\n' - 'Note that the use of the implementation’s tracing or debugging\n' - 'facilities may keep objects alive that would normally be ' - 'collectable.\n' - 'Also note that catching an exception with a "try"…"except" ' - 'statement\n' - 'may keep objects alive.\n' - '\n' - 'Some objects contain references to “external” resources such as ' - 'open\n' - 'files or windows. It is understood that these resources are ' - 'freed\n' - 'when the object is garbage-collected, but since garbage ' - 'collection is\n' - 'not guaranteed to happen, such objects also provide an explicit ' - 'way to\n' - 'release the external resource, usually a "close()" method. ' - 'Programs\n' - 'are strongly recommended to explicitly close such objects. The\n' - '"try"…"finally" statement and the "with" statement provide ' - 'convenient\n' - 'ways to do this.\n' - '\n' - 'Some objects contain references to other objects; these are ' - 'called\n' - '*containers*. Examples of containers are tuples, lists and\n' - 'dictionaries. The references are part of a container’s value. ' - 'In\n' - 'most cases, when we talk about the value of a container, we imply ' - 'the\n' - 'values, not the identities of the contained objects; however, ' - 'when we\n' - 'talk about the mutability of a container, only the identities of ' - 'the\n' - 'immediately contained objects are implied. So, if an immutable\n' - 'container (like a tuple) contains a reference to a mutable ' - 'object, its\n' - 'value changes if that mutable object is changed.\n' - '\n' - 'Types affect almost all aspects of object behavior. Even the\n' - 'importance of object identity is affected in some sense: for ' - 'immutable\n' - 'types, operations that compute new values may actually return a\n' - 'reference to any existing object with the same type and value, ' - 'while\n' - 'for mutable objects this is not allowed. 
For example, after "a = ' - '1; b\n' - '= 1", *a* and *b* may or may not refer to the same object with ' - 'the\n' - 'value one, depending on the implementation. This is because "int" ' - 'is\n' - 'an immutable type, so the reference to "1" can be reused. This\n' - 'behaviour depends on the implementation used, so should not be ' - 'relied\n' - 'upon, but is something to be aware of when making use of object\n' - 'identity tests. However, after "c = []; d = []", *c* and *d* are\n' - 'guaranteed to refer to two different, unique, newly created ' - 'empty\n' - 'lists. (Note that "e = f = []" assigns the *same* object to both ' - '*e*\n' - 'and *f*.)\n', - 'operator-summary': 'Operator precedence\n' - '*******************\n' - '\n' - 'The following table summarizes the operator precedence ' - 'in Python, from\n' - 'highest precedence (most binding) to lowest precedence ' - '(least\n' - 'binding). Operators in the same box have the same ' - 'precedence. Unless\n' - 'the syntax is explicitly given, operators are binary. ' - 'Operators in\n' - 'the same box group left to right (except for ' - 'exponentiation and\n' - 'conditional expressions, which group from right to ' - 'left).\n' - '\n' - 'Note that comparisons, membership tests, and identity ' - 'tests, all have\n' - 'the same precedence and have a left-to-right chaining ' - 'feature as\n' - 'described in the Comparisons section.\n' - '\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| Operator | ' - 'Description |\n' - '|=================================================|=======================================|\n' - '| "(expressions...)", "[expressions...]", "{key: | ' - 'Binding or parenthesized expression, |\n' - '| value...}", "{expressions...}" | list ' - 'display, dictionary display, set |\n' - '| | ' - 'display |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| "x[index]", "x[index:index]", | ' - 'Subscription, slicing, call, |\n' - '| "x(arguments...)", "x.attribute" | ' - 'attribute reference |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| "await x" | ' - 'Await expression |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| "**" | ' - 'Exponentiation [5] |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| "+x", "-x", "~x" | ' - 'Positive, negative, bitwise NOT |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| "*", "@", "/", "//", "%" | ' - 'Multiplication, matrix |\n' - '| | ' - 'multiplication, division, floor |\n' - '| | ' - 'division, remainder [6] |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| "+", "-" | ' - 'Addition and subtraction |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| "<<", ">>" | ' - 'Shifts |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| "&" | ' - 'Bitwise AND |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| "^" | ' - 'Bitwise XOR |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| "|" | ' - 'Bitwise OR |\n' - 
'+-------------------------------------------------+---------------------------------------+\n' - '| "in", "not in", "is", "is not", "<", "<=", ">", | ' - 'Comparisons, including membership |\n' - '| ">=", "!=", "==" | ' - 'tests and identity tests |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| "not x" | ' - 'Boolean NOT |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| "and" | ' - 'Boolean AND |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| "or" | ' - 'Boolean OR |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| "if" – "else" | ' - 'Conditional expression |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| "lambda" | ' - 'Lambda expression |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| ":=" | ' - 'Assignment expression |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '\n' - '-[ Footnotes ]-\n' - '\n' - '[1] While "abs(x%y) < abs(y)" is true mathematically, ' - 'for floats it\n' - ' may not be true numerically due to roundoff. For ' - 'example, and\n' - ' assuming a platform on which a Python float is an ' - 'IEEE 754 double-\n' - ' precision number, in order that "-1e-100 % 1e100" ' - 'have the same\n' - ' sign as "1e100", the computed result is "-1e-100 + ' - '1e100", which\n' - ' is numerically exactly equal to "1e100". The ' - 'function\n' - ' "math.fmod()" returns a result whose sign matches ' - 'the sign of the\n' - ' first argument instead, and so returns "-1e-100" in ' - 'this case.\n' - ' Which approach is more appropriate depends on the ' - 'application.\n' - '\n' - '[2] If x is very close to an exact integer multiple of ' - 'y, it’s\n' - ' possible for "x//y" to be one larger than ' - '"(x-x%y)//y" due to\n' - ' rounding. In such cases, Python returns the latter ' - 'result, in\n' - ' order to preserve that "divmod(x,y)[0] * y + x % y" ' - 'be very close\n' - ' to "x".\n' - '\n' - '[3] The Unicode standard distinguishes between *code ' - 'points* (e.g.\n' - ' U+0041) and *abstract characters* (e.g. “LATIN ' - 'CAPITAL LETTER Aâ€).\n' - ' While most abstract characters in Unicode are only ' - 'represented\n' - ' using one code point, there is a number of abstract ' - 'characters\n' - ' that can in addition be represented using a sequence ' - 'of more than\n' - ' one code point. For example, the abstract character ' - '“LATIN\n' - ' CAPITAL LETTER C WITH CEDILLA†can be represented as ' - 'a single\n' - ' *precomposed character* at code position U+00C7, or ' - 'as a sequence\n' - ' of a *base character* at code position U+0043 (LATIN ' - 'CAPITAL\n' - ' LETTER C), followed by a *combining character* at ' - 'code position\n' - ' U+0327 (COMBINING CEDILLA).\n' - '\n' - ' The comparison operators on strings compare at the ' - 'level of\n' - ' Unicode code points. This may be counter-intuitive ' - 'to humans. 
For\n' - ' example, ""\\u00C7" == "\\u0043\\u0327"" is "False", ' - 'even though both\n' - ' strings represent the same abstract character “LATIN ' - 'CAPITAL\n' - ' LETTER C WITH CEDILLAâ€.\n' - '\n' - ' To compare strings at the level of abstract ' - 'characters (that is,\n' - ' in a way intuitive to humans), use ' - '"unicodedata.normalize()".\n' - '\n' - '[4] Due to automatic garbage-collection, free lists, and ' - 'the dynamic\n' - ' nature of descriptors, you may notice seemingly ' - 'unusual behaviour\n' - ' in certain uses of the "is" operator, like those ' - 'involving\n' - ' comparisons between instance methods, or constants. ' - 'Check their\n' - ' documentation for more info.\n' - '\n' - '[5] The power operator "**" binds less tightly than an ' - 'arithmetic or\n' - ' bitwise unary operator on its right, that is, ' - '"2**-1" is "0.5".\n' - '\n' - '[6] The "%" operator is also used for string formatting; ' - 'the same\n' - ' precedence applies.\n', - 'pass': 'The "pass" statement\n' - '********************\n' - '\n' - ' pass_stmt ::= "pass"\n' - '\n' - '"pass" is a null operation — when it is executed, nothing happens. ' - 'It\n' - 'is useful as a placeholder when a statement is required ' - 'syntactically,\n' - 'but no code needs to be executed, for example:\n' - '\n' - ' def f(arg): pass # a function that does nothing (yet)\n' - '\n' - ' class C: pass # a class with no methods (yet)\n', - 'power': 'The power operator\n' - '******************\n' - '\n' - 'The power operator binds more tightly than unary operators on its\n' - 'left; it binds less tightly than unary operators on its right. ' - 'The\n' - 'syntax is:\n' - '\n' - ' power ::= (await_expr | primary) ["**" u_expr]\n' - '\n' - 'Thus, in an unparenthesized sequence of power and unary operators, ' - 'the\n' - 'operators are evaluated from right to left (this does not ' - 'constrain\n' - 'the evaluation order for the operands): "-1**2" results in "-1".\n' - '\n' - 'The power operator has the same semantics as the built-in "pow()"\n' - 'function, when called with two arguments: it yields its left ' - 'argument\n' - 'raised to the power of its right argument. The numeric arguments ' - 'are\n' - 'first converted to a common type, and the result is of that type.\n' - '\n' - 'For int operands, the result has the same type as the operands ' - 'unless\n' - 'the second argument is negative; in that case, all arguments are\n' - 'converted to float and a float result is delivered. For example,\n' - '"10**2" returns "100", but "10**-2" returns "0.01".\n' - '\n' - 'Raising "0.0" to a negative power results in a ' - '"ZeroDivisionError".\n' - 'Raising a negative number to a fractional power results in a ' - '"complex"\n' - 'number. (In earlier versions it raised a "ValueError".)\n' - '\n' - 'This operation can be customized using the special "__pow__()" and\n' - '"__rpow__()" methods.\n', - 'raise': 'The "raise" statement\n' - '*********************\n' - '\n' - ' raise_stmt ::= "raise" [expression ["from" expression]]\n' - '\n' - 'If no expressions are present, "raise" re-raises the exception that ' - 'is\n' - 'currently being handled, which is also known as the *active\n' - 'exception*. If there isn’t currently an active exception, a\n' - '"RuntimeError" exception is raised indicating that this is an ' - 'error.\n' - '\n' - 'Otherwise, "raise" evaluates the first expression as the exception\n' - 'object. It must be either a subclass or an instance of\n' - '"BaseException". 
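The precedence rules for the power operator described above are easy to check interactively; a quick sketch:

    print(-1 ** 2)     # -1   : ** binds more tightly than the unary minus on its left
    print(2 ** -1)     # 0.5  : ** binds less tightly than the unary minus on its right
    print((-1) ** 2)   # 1
    print(10 ** -2)    # 0.01 : a negative integer exponent yields a float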
If it is a class, the exception instance will be\n' - 'obtained when needed by instantiating the class with no arguments.\n' - '\n' - 'The *type* of the exception is the exception instance’s class, the\n' - '*value* is the instance itself.\n' - '\n' - 'A traceback object is normally created automatically when an ' - 'exception\n' - 'is raised and attached to it as the "__traceback__" attribute. You ' - 'can\n' - 'create an exception and set your own traceback in one step using ' - 'the\n' - '"with_traceback()" exception method (which returns the same ' - 'exception\n' - 'instance, with its traceback set to its argument), like so:\n' - '\n' - ' raise Exception("foo occurred").with_traceback(tracebackobj)\n' - '\n' - 'The "from" clause is used for exception chaining: if given, the ' - 'second\n' - '*expression* must be another exception class or instance. If the\n' - 'second expression is an exception instance, it will be attached to ' - 'the\n' - 'raised exception as the "__cause__" attribute (which is writable). ' - 'If\n' - 'the expression is an exception class, the class will be ' - 'instantiated\n' - 'and the resulting exception instance will be attached to the ' - 'raised\n' - 'exception as the "__cause__" attribute. If the raised exception is ' - 'not\n' - 'handled, both exceptions will be printed:\n' - '\n' - ' >>> try:\n' - ' ... print(1 / 0)\n' - ' ... except Exception as exc:\n' - ' ... raise RuntimeError("Something bad happened") from exc\n' - ' ...\n' - ' Traceback (most recent call last):\n' - ' File "", line 2, in \n' - ' print(1 / 0)\n' - ' ~~^~~\n' - ' ZeroDivisionError: division by zero\n' - '\n' - ' The above exception was the direct cause of the following ' - 'exception:\n' - '\n' - ' Traceback (most recent call last):\n' - ' File "", line 4, in \n' - ' raise RuntimeError("Something bad happened") from exc\n' - ' RuntimeError: Something bad happened\n' - '\n' - 'A similar mechanism works implicitly if a new exception is raised ' - 'when\n' - 'an exception is already being handled. An exception may be ' - 'handled\n' - 'when an "except" or "finally" clause, or a "with" statement, is ' - 'used.\n' - 'The previous exception is then attached as the new exception’s\n' - '"__context__" attribute:\n' - '\n' - ' >>> try:\n' - ' ... print(1 / 0)\n' - ' ... except:\n' - ' ... raise RuntimeError("Something bad happened")\n' - ' ...\n' - ' Traceback (most recent call last):\n' - ' File "", line 2, in \n' - ' print(1 / 0)\n' - ' ~~^~~\n' - ' ZeroDivisionError: division by zero\n' - '\n' - ' During handling of the above exception, another exception ' - 'occurred:\n' - '\n' - ' Traceback (most recent call last):\n' - ' File "", line 4, in \n' - ' raise RuntimeError("Something bad happened")\n' - ' RuntimeError: Something bad happened\n' - '\n' - 'Exception chaining can be explicitly suppressed by specifying ' - '"None"\n' - 'in the "from" clause:\n' - '\n' - ' >>> try:\n' - ' ... print(1 / 0)\n' - ' ... except:\n' - ' ... 
raise RuntimeError("Something bad happened") from None\n' - ' ...\n' - ' Traceback (most recent call last):\n' - ' File "", line 4, in \n' - ' RuntimeError: Something bad happened\n' - '\n' - 'Additional information on exceptions can be found in section\n' - 'Exceptions, and information about handling exceptions is in ' - 'section\n' - 'The try statement.\n' - '\n' - 'Changed in version 3.3: "None" is now permitted as "Y" in "raise X\n' - 'from Y".Added the "__suppress_context__" attribute to suppress\n' - 'automatic display of the exception context.\n' - '\n' - 'Changed in version 3.11: If the traceback of the active exception ' - 'is\n' - 'modified in an "except" clause, a subsequent "raise" statement re-\n' - 'raises the exception with the modified traceback. Previously, the\n' - 'exception was re-raised with the traceback it had when it was ' - 'caught.\n', - 'return': 'The "return" statement\n' - '**********************\n' - '\n' - ' return_stmt ::= "return" [expression_list]\n' - '\n' - '"return" may only occur syntactically nested in a function ' - 'definition,\n' - 'not within a nested class definition.\n' - '\n' - 'If an expression list is present, it is evaluated, else "None" is\n' - 'substituted.\n' - '\n' - '"return" leaves the current function call with the expression list ' - '(or\n' - '"None") as return value.\n' - '\n' - 'When "return" passes control out of a "try" statement with a ' - '"finally"\n' - 'clause, that "finally" clause is executed before really leaving ' - 'the\n' - 'function.\n' - '\n' - 'In a generator function, the "return" statement indicates that ' - 'the\n' - 'generator is done and will cause "StopIteration" to be raised. ' - 'The\n' - 'returned value (if any) is used as an argument to construct\n' - '"StopIteration" and becomes the "StopIteration.value" attribute.\n' - '\n' - 'In an asynchronous generator function, an empty "return" ' - 'statement\n' - 'indicates that the asynchronous generator is done and will cause\n' - '"StopAsyncIteration" to be raised. A non-empty "return" statement ' - 'is\n' - 'a syntax error in an asynchronous generator function.\n', - 'sequence-types': 'Emulating container types\n' - '*************************\n' - '\n' - 'The following methods can be defined to implement ' - 'container objects.\n' - 'None of them are provided by the "object" class itself. ' - 'Containers\n' - 'usually are *sequences* (such as "lists" or "tuples") or ' - '*mappings*\n' - '(like *dictionaries*), but can represent other containers ' - 'as well.\n' - 'The first set of methods is used either to emulate a ' - 'sequence or to\n' - 'emulate a mapping; the difference is that for a sequence, ' - 'the\n' - 'allowable keys should be the integers *k* for which "0 <= ' - 'k < N" where\n' - '*N* is the length of the sequence, or "slice" objects, ' - 'which define a\n' - 'range of items. 
It is also recommended that mappings ' - 'provide the\n' - 'methods "keys()", "values()", "items()", "get()", ' - '"clear()",\n' - '"setdefault()", "pop()", "popitem()", "copy()", and ' - '"update()"\n' - 'behaving similar to those for Python’s standard ' - '"dictionary" objects.\n' - 'The "collections.abc" module provides a "MutableMapping" ' - '*abstract\n' - 'base class* to help create those methods from a base set ' - 'of\n' - '"__getitem__()", "__setitem__()", "__delitem__()", and ' - '"keys()".\n' - 'Mutable sequences should provide methods "append()", ' - '"count()",\n' - '"index()", "extend()", "insert()", "pop()", "remove()", ' - '"reverse()"\n' - 'and "sort()", like Python standard "list" objects. ' - 'Finally, sequence\n' - 'types should implement addition (meaning concatenation) ' - 'and\n' - 'multiplication (meaning repetition) by defining the ' - 'methods\n' - '"__add__()", "__radd__()", "__iadd__()", "__mul__()", ' - '"__rmul__()" and\n' - '"__imul__()" described below; they should not define other ' - 'numerical\n' - 'operators. It is recommended that both mappings and ' - 'sequences\n' - 'implement the "__contains__()" method to allow efficient ' - 'use of the\n' - '"in" operator; for mappings, "in" should search the ' - 'mapping’s keys;\n' - 'for sequences, it should search through the values. It is ' - 'further\n' - 'recommended that both mappings and sequences implement ' - 'the\n' - '"__iter__()" method to allow efficient iteration through ' - 'the\n' - 'container; for mappings, "__iter__()" should iterate ' - 'through the\n' - 'object’s keys; for sequences, it should iterate through ' - 'the values.\n' - '\n' - 'object.__len__(self)\n' - '\n' - ' Called to implement the built-in function "len()". ' - 'Should return\n' - ' the length of the object, an integer ">=" 0. Also, an ' - 'object that\n' - ' doesn’t define a "__bool__()" method and whose ' - '"__len__()" method\n' - ' returns zero is considered to be false in a Boolean ' - 'context.\n' - '\n' - ' **CPython implementation detail:** In CPython, the ' - 'length is\n' - ' required to be at most "sys.maxsize". If the length is ' - 'larger than\n' - ' "sys.maxsize" some features (such as "len()") may ' - 'raise\n' - ' "OverflowError". To prevent raising "OverflowError" by ' - 'truth value\n' - ' testing, an object must define a "__bool__()" method.\n' - '\n' - 'object.__length_hint__(self)\n' - '\n' - ' Called to implement "operator.length_hint()". Should ' - 'return an\n' - ' estimated length for the object (which may be greater ' - 'or less than\n' - ' the actual length). The length must be an integer ">=" ' - '0. The\n' - ' return value may also be "NotImplemented", which is ' - 'treated the\n' - ' same as if the "__length_hint__" method didn’t exist at ' - 'all. This\n' - ' method is purely an optimization and is never required ' - 'for\n' - ' correctness.\n' - '\n' - ' Added in version 3.4.\n' - '\n' - 'Note:\n' - '\n' - ' Slicing is done exclusively with the following three ' - 'methods. A\n' - ' call like\n' - '\n' - ' a[1:2] = b\n' - '\n' - ' is translated to\n' - '\n' - ' a[slice(1, 2, None)] = b\n' - '\n' - ' and so forth. Missing slice items are always filled in ' - 'with "None".\n' - '\n' - 'object.__getitem__(self, key)\n' - '\n' - ' Called to implement evaluation of "self[key]". For ' - '*sequence*\n' - ' types, the accepted keys should be integers. ' - 'Optionally, they may\n' - ' support "slice" objects as well. Negative index ' - 'support is also\n' - ' optional. 
If *key* is of an inappropriate type, ' - '"TypeError" may be\n' - ' raised; if *key* is a value outside the set of indexes ' - 'for the\n' - ' sequence (after any special interpretation of negative ' - 'values),\n' - ' "IndexError" should be raised. For *mapping* types, if ' - '*key* is\n' - ' missing (not in the container), "KeyError" should be ' - 'raised.\n' - '\n' - ' Note:\n' - '\n' - ' "for" loops expect that an "IndexError" will be ' - 'raised for\n' - ' illegal indexes to allow proper detection of the end ' - 'of the\n' - ' sequence.\n' - '\n' - ' Note:\n' - '\n' - ' When subscripting a *class*, the special class ' - 'method\n' - ' "__class_getitem__()" may be called instead of ' - '"__getitem__()".\n' - ' See __class_getitem__ versus __getitem__ for more ' - 'details.\n' - '\n' - 'object.__setitem__(self, key, value)\n' - '\n' - ' Called to implement assignment to "self[key]". Same ' - 'note as for\n' - ' "__getitem__()". This should only be implemented for ' - 'mappings if\n' - ' the objects support changes to the values for keys, or ' - 'if new keys\n' - ' can be added, or for sequences if elements can be ' - 'replaced. The\n' - ' same exceptions should be raised for improper *key* ' - 'values as for\n' - ' the "__getitem__()" method.\n' - '\n' - 'object.__delitem__(self, key)\n' - '\n' - ' Called to implement deletion of "self[key]". Same note ' - 'as for\n' - ' "__getitem__()". This should only be implemented for ' - 'mappings if\n' - ' the objects support removal of keys, or for sequences ' - 'if elements\n' - ' can be removed from the sequence. The same exceptions ' - 'should be\n' - ' raised for improper *key* values as for the ' - '"__getitem__()" method.\n' - '\n' - 'object.__missing__(self, key)\n' - '\n' - ' Called by "dict"."__getitem__()" to implement ' - '"self[key]" for dict\n' - ' subclasses when key is not in the dictionary.\n' - '\n' - 'object.__iter__(self)\n' - '\n' - ' This method is called when an *iterator* is required ' - 'for a\n' - ' container. This method should return a new iterator ' - 'object that can\n' - ' iterate over all the objects in the container. For ' - 'mappings, it\n' - ' should iterate over the keys of the container.\n' - '\n' - 'object.__reversed__(self)\n' - '\n' - ' Called (if present) by the "reversed()" built-in to ' - 'implement\n' - ' reverse iteration. It should return a new iterator ' - 'object that\n' - ' iterates over all the objects in the container in ' - 'reverse order.\n' - '\n' - ' If the "__reversed__()" method is not provided, the ' - '"reversed()"\n' - ' built-in will fall back to using the sequence protocol ' - '("__len__()"\n' - ' and "__getitem__()"). Objects that support the ' - 'sequence protocol\n' - ' should only provide "__reversed__()" if they can ' - 'provide an\n' - ' implementation that is more efficient than the one ' - 'provided by\n' - ' "reversed()".\n' - '\n' - 'The membership test operators ("in" and "not in") are ' - 'normally\n' - 'implemented as an iteration through a container. However, ' - 'container\n' - 'objects can supply the following special method with a ' - 'more efficient\n' - 'implementation, which also does not require the object be ' - 'iterable.\n' - '\n' - 'object.__contains__(self, item)\n' - '\n' - ' Called to implement membership test operators. Should ' - 'return true\n' - ' if *item* is in *self*, false otherwise. 
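A minimal sketch of the sequence side of the container protocol described in this section; the class and its attributes are invented for the example:

    class Deck:
        def __init__(self, cards):
            self._cards = list(cards)

        def __len__(self):
            return len(self._cards)

        def __getitem__(self, index):
            return self._cards[index]      # ints and slice objects both work here

        def __contains__(self, card):
            return card in self._cards     # keeps 'in' from falling back to iteration

    d = Deck(["ace", "king", "queen"])
    print(len(d), d[0], d[-1:])            # 3 ace ['queen']
    print("king" in d)                     # True
    print(list(reversed(d)))               # ['queen', 'king', 'ace'] via __len__/__getitem__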
For mapping ' - 'objects, this\n' - ' should consider the keys of the mapping rather than the ' - 'values or\n' - ' the key-item pairs.\n' - '\n' - ' For objects that don’t define "__contains__()", the ' - 'membership test\n' - ' first tries iteration via "__iter__()", then the old ' - 'sequence\n' - ' iteration protocol via "__getitem__()", see this ' - 'section in the\n' - ' language reference.\n', - 'shifting': 'Shifting operations\n' - '*******************\n' - '\n' - 'The shifting operations have lower priority than the arithmetic\n' - 'operations:\n' - '\n' - ' shift_expr ::= a_expr | shift_expr ("<<" | ">>") a_expr\n' - '\n' - 'These operators accept integers as arguments. They shift the ' - 'first\n' - 'argument to the left or right by the number of bits given by ' - 'the\n' - 'second argument.\n' - '\n' - 'The left shift operation can be customized using the special\n' - '"__lshift__()" and "__rlshift__()" methods. The right shift ' - 'operation\n' - 'can be customized using the special "__rshift__()" and ' - '"__rrshift__()"\n' - 'methods.\n' - '\n' - 'A right shift by *n* bits is defined as floor division by ' - '"pow(2,n)".\n' - 'A left shift by *n* bits is defined as multiplication with ' - '"pow(2,n)".\n', - 'slicings': 'Slicings\n' - '********\n' - '\n' - 'A slicing selects a range of items in a sequence object (e.g., ' - 'a\n' - 'string, tuple or list). Slicings may be used as expressions or ' - 'as\n' - 'targets in assignment or "del" statements. The syntax for a ' - 'slicing:\n' - '\n' - ' slicing ::= primary "[" slice_list "]"\n' - ' slice_list ::= slice_item ("," slice_item)* [","]\n' - ' slice_item ::= expression | proper_slice\n' - ' proper_slice ::= [lower_bound] ":" [upper_bound] [ ":" ' - '[stride] ]\n' - ' lower_bound ::= expression\n' - ' upper_bound ::= expression\n' - ' stride ::= expression\n' - '\n' - 'There is ambiguity in the formal syntax here: anything that ' - 'looks like\n' - 'an expression list also looks like a slice list, so any ' - 'subscription\n' - 'can be interpreted as a slicing. Rather than further ' - 'complicating the\n' - 'syntax, this is disambiguated by defining that in this case the\n' - 'interpretation as a subscription takes priority over the\n' - 'interpretation as a slicing (this is the case if the slice list\n' - 'contains no proper slice).\n' - '\n' - 'The semantics for a slicing are as follows. The primary is ' - 'indexed\n' - '(using the same "__getitem__()" method as normal subscription) ' - 'with a\n' - 'key that is constructed from the slice list, as follows. If the ' - 'slice\n' - 'list contains at least one comma, the key is a tuple containing ' - 'the\n' - 'conversion of the slice items; otherwise, the conversion of the ' - 'lone\n' - 'slice item is the key. The conversion of a slice item that is ' - 'an\n' - 'expression is that expression. The conversion of a proper slice ' - 'is a\n' - 'slice object (see section The standard type hierarchy) whose ' - '"start",\n' - '"stop" and "step" attributes are the values of the expressions ' - 'given\n' - 'as lower bound, upper bound and stride, respectively, ' - 'substituting\n' - '"None" for missing expressions.\n', - 'specialattrs': 'Special Attributes\n' - '******************\n' - '\n' - 'The implementation adds a few special read-only attributes ' - 'to several\n' - 'object types, where they are relevant. 
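The way a slicing is turned into a key for "__getitem__()", as just described, can be observed directly; the probe class is invented for the example:

    class Probe:
        def __getitem__(self, key):
            return key                     # report exactly what subscription passed in

    p = Probe()
    print(p[3])                            # 3
    print(p[1:10:2])                       # slice(1, 10, 2)
    print(p[1:2, 7])                       # (slice(1, 2, None), 7): a comma makes the key a tuple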
Some of these are ' - 'not reported\n' - 'by the "dir()" built-in function.\n' - '\n' - 'definition.__name__\n' - '\n' - ' The name of the class, function, method, descriptor, or ' - 'generator\n' - ' instance.\n' - '\n' - 'definition.__qualname__\n' - '\n' - ' The *qualified name* of the class, function, method, ' - 'descriptor, or\n' - ' generator instance.\n' - '\n' - ' Added in version 3.3.\n' - '\n' - 'definition.__module__\n' - '\n' - ' The name of the module in which a class or function was ' - 'defined.\n' - '\n' - 'definition.__doc__\n' - '\n' - ' The documentation string of a class or function, or ' - '"None" if\n' - ' undefined.\n' - '\n' - 'definition.__type_params__\n' - '\n' - ' The type parameters of generic classes, functions, and ' - 'type\n' - ' aliases. For classes and functions that are not generic, ' - 'this will\n' - ' be an empty tuple.\n' - '\n' - ' Added in version 3.12.\n', - 'specialnames': 'Special method names\n' - '********************\n' - '\n' - 'A class can implement certain operations that are invoked by ' - 'special\n' - 'syntax (such as arithmetic operations or subscripting and ' - 'slicing) by\n' - 'defining methods with special names. This is Python’s ' - 'approach to\n' - '*operator overloading*, allowing classes to define their own ' - 'behavior\n' - 'with respect to language operators. For instance, if a ' - 'class defines\n' - 'a method named "__getitem__()", and "x" is an instance of ' - 'this class,\n' - 'then "x[i]" is roughly equivalent to "type(x).__getitem__(x, ' - 'i)".\n' - 'Except where mentioned, attempts to execute an operation ' - 'raise an\n' - 'exception when no appropriate method is defined (typically\n' - '"AttributeError" or "TypeError").\n' - '\n' - 'Setting a special method to "None" indicates that the ' - 'corresponding\n' - 'operation is not available. For example, if a class sets ' - '"__iter__()"\n' - 'to "None", the class is not iterable, so calling "iter()" on ' - 'its\n' - 'instances will raise a "TypeError" (without falling back to\n' - '"__getitem__()"). [2]\n' - '\n' - 'When implementing a class that emulates any built-in type, ' - 'it is\n' - 'important that the emulation only be implemented to the ' - 'degree that it\n' - 'makes sense for the object being modelled. For example, ' - 'some\n' - 'sequences may work well with retrieval of individual ' - 'elements, but\n' - 'extracting a slice may not make sense. (One example of this ' - 'is the\n' - '"NodeList" interface in the W3C’s Document Object Model.)\n' - '\n' - '\n' - 'Basic customization\n' - '===================\n' - '\n' - 'object.__new__(cls[, ...])\n' - '\n' - ' Called to create a new instance of class *cls*. ' - '"__new__()" is a\n' - ' static method (special-cased so you need not declare it ' - 'as such)\n' - ' that takes the class of which an instance was requested ' - 'as its\n' - ' first argument. The remaining arguments are those passed ' - 'to the\n' - ' object constructor expression (the call to the class). 
' - 'The return\n' - ' value of "__new__()" should be the new object instance ' - '(usually an\n' - ' instance of *cls*).\n' - '\n' - ' Typical implementations create a new instance of the ' - 'class by\n' - ' invoking the superclass’s "__new__()" method using\n' - ' "super().__new__(cls[, ...])" with appropriate arguments ' - 'and then\n' - ' modifying the newly created instance as necessary before ' - 'returning\n' - ' it.\n' - '\n' - ' If "__new__()" is invoked during object construction and ' - 'it returns\n' - ' an instance of *cls*, then the new instance’s ' - '"__init__()" method\n' - ' will be invoked like "__init__(self[, ...])", where ' - '*self* is the\n' - ' new instance and the remaining arguments are the same as ' - 'were\n' - ' passed to the object constructor.\n' - '\n' - ' If "__new__()" does not return an instance of *cls*, then ' - 'the new\n' - ' instance’s "__init__()" method will not be invoked.\n' - '\n' - ' "__new__()" is intended mainly to allow subclasses of ' - 'immutable\n' - ' types (like int, str, or tuple) to customize instance ' - 'creation. It\n' - ' is also commonly overridden in custom metaclasses in ' - 'order to\n' - ' customize class creation.\n' - '\n' - 'object.__init__(self[, ...])\n' - '\n' - ' Called after the instance has been created (by ' - '"__new__()"), but\n' - ' before it is returned to the caller. The arguments are ' - 'those\n' - ' passed to the class constructor expression. If a base ' - 'class has an\n' - ' "__init__()" method, the derived class’s "__init__()" ' - 'method, if\n' - ' any, must explicitly call it to ensure proper ' - 'initialization of the\n' - ' base class part of the instance; for example:\n' - ' "super().__init__([args...])".\n' - '\n' - ' Because "__new__()" and "__init__()" work together in ' - 'constructing\n' - ' objects ("__new__()" to create it, and "__init__()" to ' - 'customize\n' - ' it), no non-"None" value may be returned by "__init__()"; ' - 'doing so\n' - ' will cause a "TypeError" to be raised at runtime.\n' - '\n' - 'object.__del__(self)\n' - '\n' - ' Called when the instance is about to be destroyed. This ' - 'is also\n' - ' called a finalizer or (improperly) a destructor. If a ' - 'base class\n' - ' has a "__del__()" method, the derived class’s "__del__()" ' - 'method,\n' - ' if any, must explicitly call it to ensure proper deletion ' - 'of the\n' - ' base class part of the instance.\n' - '\n' - ' It is possible (though not recommended!) for the ' - '"__del__()" method\n' - ' to postpone destruction of the instance by creating a new ' - 'reference\n' - ' to it. This is called object *resurrection*. It is\n' - ' implementation-dependent whether "__del__()" is called a ' - 'second\n' - ' time when a resurrected object is about to be destroyed; ' - 'the\n' - ' current *CPython* implementation only calls it once.\n' - '\n' - ' It is not guaranteed that "__del__()" methods are called ' - 'for\n' - ' objects that still exist when the interpreter exits.\n' - ' "weakref.finalize" provides a straightforward way to ' - 'register a\n' - ' cleanup function to be called when an object is garbage ' - 'collected.\n' - '\n' - ' Note:\n' - '\n' - ' "del x" doesn’t directly call "x.__del__()" — the ' - 'former\n' - ' decrements the reference count for "x" by one, and the ' - 'latter is\n' - ' only called when "x"’s reference count reaches zero.\n' - '\n' - ' **CPython implementation detail:** It is possible for a ' - 'reference\n' - ' cycle to prevent the reference count of an object from ' - 'going to\n' - ' zero. 
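A small sketch of the division of labour between "__new__()" and "__init__()" described above, using an immutable base type; the class name is invented for the example:

    class Celsius(float):
        def __new__(cls, value):
            return super().__new__(cls, value)   # create the (immutable) float instance

        def __init__(self, value):
            self.unit = "degC"                   # then customise the new instance

    t = Celsius(21.5)
    print(t, t.unit)              # 21.5 degC
    print(isinstance(t, float))   # True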
In this case, the cycle will be later detected and ' - 'deleted\n' - ' by the *cyclic garbage collector*. A common cause of ' - 'reference\n' - ' cycles is when an exception has been caught in a local ' - 'variable.\n' - ' The frame’s locals then reference the exception, which ' - 'references\n' - ' its own traceback, which references the locals of all ' - 'frames caught\n' - ' in the traceback.\n' - '\n' - ' See also: Documentation for the "gc" module.\n' - '\n' - ' Warning:\n' - '\n' - ' Due to the precarious circumstances under which ' - '"__del__()"\n' - ' methods are invoked, exceptions that occur during their ' - 'execution\n' - ' are ignored, and a warning is printed to "sys.stderr" ' - 'instead.\n' - ' In particular:\n' - '\n' - ' * "__del__()" can be invoked when arbitrary code is ' - 'being\n' - ' executed, including from any arbitrary thread. If ' - '"__del__()"\n' - ' needs to take a lock or invoke any other blocking ' - 'resource, it\n' - ' may deadlock as the resource may already be taken by ' - 'the code\n' - ' that gets interrupted to execute "__del__()".\n' - '\n' - ' * "__del__()" can be executed during interpreter ' - 'shutdown. As a\n' - ' consequence, the global variables it needs to access ' - '(including\n' - ' other modules) may already have been deleted or set ' - 'to "None".\n' - ' Python guarantees that globals whose name begins with ' - 'a single\n' - ' underscore are deleted from their module before other ' - 'globals\n' - ' are deleted; if no other references to such globals ' - 'exist, this\n' - ' may help in assuring that imported modules are still ' - 'available\n' - ' at the time when the "__del__()" method is called.\n' - '\n' - 'object.__repr__(self)\n' - '\n' - ' Called by the "repr()" built-in function to compute the ' - '“officialâ€\n' - ' string representation of an object. If at all possible, ' - 'this\n' - ' should look like a valid Python expression that could be ' - 'used to\n' - ' recreate an object with the same value (given an ' - 'appropriate\n' - ' environment). If this is not possible, a string of the ' - 'form\n' - ' "<...some useful description...>" should be returned. The ' - 'return\n' - ' value must be a string object. If a class defines ' - '"__repr__()" but\n' - ' not "__str__()", then "__repr__()" is also used when an ' - '“informalâ€\n' - ' string representation of instances of that class is ' - 'required.\n' - '\n' - ' This is typically used for debugging, so it is important ' - 'that the\n' - ' representation is information-rich and unambiguous. A ' - 'default\n' - ' implementation is provided by the "object" class itself.\n' - '\n' - 'object.__str__(self)\n' - '\n' - ' Called by "str(object)", the default "__format__()" ' - 'implementation,\n' - ' and the built-in function "print()", to compute the ' - '“informal†or\n' - ' nicely printable string representation of an object. The ' - 'return\n' - ' value must be a str object.\n' - '\n' - ' This method differs from "object.__repr__()" in that ' - 'there is no\n' - ' expectation that "__str__()" return a valid Python ' - 'expression: a\n' - ' more convenient or concise representation can be used.\n' - '\n' - ' The default implementation defined by the built-in type ' - '"object"\n' - ' calls "object.__repr__()".\n' - '\n' - 'object.__bytes__(self)\n' - '\n' - ' Called by bytes to compute a byte-string representation ' - 'of an\n' - ' object. This should return a "bytes" object. 
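The "official" and "informal" string forms, together with the "__bytes__()" hook just mentioned, in one small sketch; the names are invented for the example:

    class Point:
        def __init__(self, x, y):
            self.x, self.y = x, y

        def __repr__(self):
            return f"Point({self.x!r}, {self.y!r})"   # unambiguous, ideally eval()-able

        def __str__(self):
            return f"({self.x}, {self.y})"            # friendlier display form

        def __bytes__(self):
            return f"{self.x},{self.y}".encode("ascii")

    p = Point(1, 2)
    print(repr(p), str(p), bytes(p))   # Point(1, 2) (1, 2) b'1,2'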
The "object" ' - 'class\n' - ' itself does not provide this method.\n' - '\n' - 'object.__format__(self, format_spec)\n' - '\n' - ' Called by the "format()" built-in function, and by ' - 'extension,\n' - ' evaluation of formatted string literals and the ' - '"str.format()"\n' - ' method, to produce a “formatted†string representation of ' - 'an\n' - ' object. The *format_spec* argument is a string that ' - 'contains a\n' - ' description of the formatting options desired. The ' - 'interpretation\n' - ' of the *format_spec* argument is up to the type ' - 'implementing\n' - ' "__format__()", however most classes will either ' - 'delegate\n' - ' formatting to one of the built-in types, or use a ' - 'similar\n' - ' formatting option syntax.\n' - '\n' - ' See Format Specification Mini-Language for a description ' - 'of the\n' - ' standard formatting syntax.\n' - '\n' - ' The return value must be a string object.\n' - '\n' - ' The default implementation by the "object" class should ' - 'be given an\n' - ' empty *format_spec* string. It delegates to "__str__()".\n' - '\n' - ' Changed in version 3.4: The __format__ method of "object" ' - 'itself\n' - ' raises a "TypeError" if passed any non-empty string.\n' - '\n' - ' Changed in version 3.7: "object.__format__(x, \'\')" is ' - 'now\n' - ' equivalent to "str(x)" rather than "format(str(x), ' - '\'\')".\n' - '\n' - 'object.__lt__(self, other)\n' - 'object.__le__(self, other)\n' - 'object.__eq__(self, other)\n' - 'object.__ne__(self, other)\n' - 'object.__gt__(self, other)\n' - 'object.__ge__(self, other)\n' - '\n' - ' These are the so-called “rich comparison†methods. The\n' - ' correspondence between operator symbols and method names ' - 'is as\n' - ' follows: "xy" calls\n' - ' "x.__gt__(y)", and "x>=y" calls "x.__ge__(y)".\n' - '\n' - ' A rich comparison method may return the singleton ' - '"NotImplemented"\n' - ' if it does not implement the operation for a given pair ' - 'of\n' - ' arguments. By convention, "False" and "True" are returned ' - 'for a\n' - ' successful comparison. However, these methods can return ' - 'any value,\n' - ' so if the comparison operator is used in a Boolean ' - 'context (e.g.,\n' - ' in the condition of an "if" statement), Python will call ' - '"bool()"\n' - ' on the value to determine if the result is true or ' - 'false.\n' - '\n' - ' By default, "object" implements "__eq__()" by using "is", ' - 'returning\n' - ' "NotImplemented" in the case of a false comparison: "True ' - 'if x is y\n' - ' else NotImplemented". For "__ne__()", by default it ' - 'delegates to\n' - ' "__eq__()" and inverts the result unless it is ' - '"NotImplemented".\n' - ' There are no other implied relationships among the ' - 'comparison\n' - ' operators or default implementations; for example, the ' - 'truth of\n' - ' "(x.__hash__".\n' - '\n' - ' If a class that does not override "__eq__()" wishes to ' - 'suppress\n' - ' hash support, it should include "__hash__ = None" in the ' - 'class\n' - ' definition. A class which defines its own "__hash__()" ' - 'that\n' - ' explicitly raises a "TypeError" would be incorrectly ' - 'identified as\n' - ' hashable by an "isinstance(obj, ' - 'collections.abc.Hashable)" call.\n' - '\n' - ' Note:\n' - '\n' - ' By default, the "__hash__()" values of str and bytes ' - 'objects are\n' - ' “salted†with an unpredictable random value. 
Although ' - 'they\n' - ' remain constant within an individual Python process, ' - 'they are not\n' - ' predictable between repeated invocations of Python.This ' - 'is\n' - ' intended to provide protection against a ' - 'denial-of-service caused\n' - ' by carefully chosen inputs that exploit the worst case\n' - ' performance of a dict insertion, *O*(*n*^2) ' - 'complexity. See\n' - ' http://ocert.org/advisories/ocert-2011-003.html for\n' - ' details.Changing hash values affects the iteration ' - 'order of sets.\n' - ' Python has never made guarantees about this ordering ' - '(and it\n' - ' typically varies between 32-bit and 64-bit builds).See ' - 'also\n' - ' "PYTHONHASHSEED".\n' - '\n' - ' Changed in version 3.3: Hash randomization is enabled by ' - 'default.\n' - '\n' - 'object.__bool__(self)\n' - '\n' - ' Called to implement truth value testing and the built-in ' - 'operation\n' - ' "bool()"; should return "False" or "True". When this ' - 'method is not\n' - ' defined, "__len__()" is called, if it is defined, and the ' - 'object is\n' - ' considered true if its result is nonzero. If a class ' - 'defines\n' - ' neither "__len__()" nor "__bool__()" (which is true of ' - 'the "object"\n' - ' class itself), all its instances are considered true.\n' - '\n' - '\n' - 'Customizing attribute access\n' - '============================\n' - '\n' - 'The following methods can be defined to customize the ' - 'meaning of\n' - 'attribute access (use of, assignment to, or deletion of ' - '"x.name") for\n' - 'class instances.\n' - '\n' - 'object.__getattr__(self, name)\n' - '\n' - ' Called when the default attribute access fails with an\n' - ' "AttributeError" (either "__getattribute__()" raises an\n' - ' "AttributeError" because *name* is not an instance ' - 'attribute or an\n' - ' attribute in the class tree for "self"; or "__get__()" of ' - 'a *name*\n' - ' property raises "AttributeError"). This method should ' - 'either\n' - ' return the (computed) attribute value or raise an ' - '"AttributeError"\n' - ' exception. The "object" class itself does not provide ' - 'this method.\n' - '\n' - ' Note that if the attribute is found through the normal ' - 'mechanism,\n' - ' "__getattr__()" is not called. (This is an intentional ' - 'asymmetry\n' - ' between "__getattr__()" and "__setattr__()".) This is ' - 'done both for\n' - ' efficiency reasons and because otherwise "__getattr__()" ' - 'would have\n' - ' no way to access other attributes of the instance. Note ' - 'that at\n' - ' least for instance variables, you can take total control ' - 'by not\n' - ' inserting any values in the instance attribute dictionary ' - '(but\n' - ' instead inserting them in another object). See the\n' - ' "__getattribute__()" method below for a way to actually ' - 'get total\n' - ' control over attribute access.\n' - '\n' - 'object.__getattribute__(self, name)\n' - '\n' - ' Called unconditionally to implement attribute accesses ' - 'for\n' - ' instances of the class. If the class also defines ' - '"__getattr__()",\n' - ' the latter will not be called unless "__getattribute__()" ' - 'either\n' - ' calls it explicitly or raises an "AttributeError". This ' - 'method\n' - ' should return the (computed) attribute value or raise an\n' - ' "AttributeError" exception. 
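A minimal sketch of the "__getattr__()" fallback described above, assuming a hypothetical "Settings" class whose unknown attributes are served from a backing dictionary:

    class Settings:
        def __init__(self, values):
            # Stored as a real instance attribute, so normal lookup finds it
            # and __getattr__ is never triggered for it.
            self._values = dict(values)

        def __getattr__(self, name):
            # Called only after normal lookup has failed with AttributeError.
            try:
                return self._values[name]
            except KeyError:
                raise AttributeError(name) from None

    s = Settings({'timeout': 30})
    assert s.timeout == 30                  # falls through to __getattr__
    assert s._values == {'timeout': 30}     # found normally; __getattr__ not called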
In order to avoid infinite ' - 'recursion in\n' - ' this method, its implementation should always call the ' - 'base class\n' - ' method with the same name to access any attributes it ' - 'needs, for\n' - ' example, "object.__getattribute__(self, name)".\n' - '\n' - ' Note:\n' - '\n' - ' This method may still be bypassed when looking up ' - 'special methods\n' - ' as the result of implicit invocation via language ' - 'syntax or\n' - ' built-in functions. See Special method lookup.\n' - '\n' - ' For certain sensitive attribute accesses, raises an ' - 'auditing event\n' - ' "object.__getattr__" with arguments "obj" and "name".\n' - '\n' - 'object.__setattr__(self, name, value)\n' - '\n' - ' Called when an attribute assignment is attempted. This ' - 'is called\n' - ' instead of the normal mechanism (i.e. store the value in ' - 'the\n' - ' instance dictionary). *name* is the attribute name, ' - '*value* is the\n' - ' value to be assigned to it.\n' - '\n' - ' If "__setattr__()" wants to assign to an instance ' - 'attribute, it\n' - ' should call the base class method with the same name, for ' - 'example,\n' - ' "object.__setattr__(self, name, value)".\n' - '\n' - ' For certain sensitive attribute assignments, raises an ' - 'auditing\n' - ' event "object.__setattr__" with arguments "obj", "name", ' - '"value".\n' - '\n' - 'object.__delattr__(self, name)\n' - '\n' - ' Like "__setattr__()" but for attribute deletion instead ' - 'of\n' - ' assignment. This should only be implemented if "del ' - 'obj.name" is\n' - ' meaningful for the object.\n' - '\n' - ' For certain sensitive attribute deletions, raises an ' - 'auditing event\n' - ' "object.__delattr__" with arguments "obj" and "name".\n' - '\n' - 'object.__dir__(self)\n' - '\n' - ' Called when "dir()" is called on the object. An iterable ' - 'must be\n' - ' returned. "dir()" converts the returned iterable to a ' - 'list and\n' - ' sorts it.\n' - '\n' - '\n' - 'Customizing module attribute access\n' - '-----------------------------------\n' - '\n' - 'Special names "__getattr__" and "__dir__" can be also used ' - 'to\n' - 'customize access to module attributes. The "__getattr__" ' - 'function at\n' - 'the module level should accept one argument which is the ' - 'name of an\n' - 'attribute and return the computed value or raise an ' - '"AttributeError".\n' - 'If an attribute is not found on a module object through the ' - 'normal\n' - 'lookup, i.e. "object.__getattribute__()", then "__getattr__" ' - 'is\n' - 'searched in the module "__dict__" before raising an ' - '"AttributeError".\n' - 'If found, it is called with the attribute name and the ' - 'result is\n' - 'returned.\n' - '\n' - 'The "__dir__" function should accept no arguments, and ' - 'return an\n' - 'iterable of strings that represents the names accessible on ' - 'module. If\n' - 'present, this function overrides the standard "dir()" search ' - 'on a\n' - 'module.\n' - '\n' - 'For a more fine grained customization of the module behavior ' - '(setting\n' - 'attributes, properties, etc.), one can set the "__class__" ' - 'attribute\n' - 'of a module object to a subclass of "types.ModuleType". 
For ' - 'example:\n' - '\n' - ' import sys\n' - ' from types import ModuleType\n' - '\n' - ' class VerboseModule(ModuleType):\n' - ' def __repr__(self):\n' - " return f'Verbose {self.__name__}'\n" - '\n' - ' def __setattr__(self, attr, value):\n' - " print(f'Setting {attr}...')\n" - ' super().__setattr__(attr, value)\n' - '\n' - ' sys.modules[__name__].__class__ = VerboseModule\n' - '\n' - 'Note:\n' - '\n' - ' Defining module "__getattr__" and setting module ' - '"__class__" only\n' - ' affect lookups made using the attribute access syntax – ' - 'directly\n' - ' accessing the module globals (whether by code within the ' - 'module, or\n' - ' via a reference to the module’s globals dictionary) is ' - 'unaffected.\n' - '\n' - 'Changed in version 3.5: "__class__" module attribute is now ' - 'writable.\n' - '\n' - 'Added in version 3.7: "__getattr__" and "__dir__" module ' - 'attributes.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 562** - Module __getattr__ and __dir__\n' - ' Describes the "__getattr__" and "__dir__" functions on ' - 'modules.\n' - '\n' - '\n' - 'Implementing Descriptors\n' - '------------------------\n' - '\n' - 'The following methods only apply when an instance of the ' - 'class\n' - 'containing the method (a so-called *descriptor* class) ' - 'appears in an\n' - '*owner* class (the descriptor must be in either the owner’s ' - 'class\n' - 'dictionary or in the class dictionary for one of its ' - 'parents). In the\n' - 'examples below, “the attribute†refers to the attribute ' - 'whose name is\n' - 'the key of the property in the owner class’ "__dict__". The ' - '"object"\n' - 'class itself does not implement any of these protocols.\n' - '\n' - 'object.__get__(self, instance, owner=None)\n' - '\n' - ' Called to get the attribute of the owner class (class ' - 'attribute\n' - ' access) or of an instance of that class (instance ' - 'attribute\n' - ' access). The optional *owner* argument is the owner ' - 'class, while\n' - ' *instance* is the instance that the attribute was ' - 'accessed through,\n' - ' or "None" when the attribute is accessed through the ' - '*owner*.\n' - '\n' - ' This method should return the computed attribute value or ' - 'raise an\n' - ' "AttributeError" exception.\n' - '\n' - ' **PEP 252** specifies that "__get__()" is callable with ' - 'one or two\n' - ' arguments. Python’s own built-in descriptors support ' - 'this\n' - ' specification; however, it is likely that some ' - 'third-party tools\n' - ' have descriptors that require both arguments. Python’s ' - 'own\n' - ' "__getattribute__()" implementation always passes in both ' - 'arguments\n' - ' whether they are required or not.\n' - '\n' - 'object.__set__(self, instance, value)\n' - '\n' - ' Called to set the attribute on an instance *instance* of ' - 'the owner\n' - ' class to a new value, *value*.\n' - '\n' - ' Note, adding "__set__()" or "__delete__()" changes the ' - 'kind of\n' - ' descriptor to a “data descriptorâ€. 
See Invoking ' - 'Descriptors for\n' - ' more details.\n' - '\n' - 'object.__delete__(self, instance)\n' - '\n' - ' Called to delete the attribute on an instance *instance* ' - 'of the\n' - ' owner class.\n' - '\n' - 'Instances of descriptors may also have the "__objclass__" ' - 'attribute\n' - 'present:\n' - '\n' - 'object.__objclass__\n' - '\n' - ' The attribute "__objclass__" is interpreted by the ' - '"inspect" module\n' - ' as specifying the class where this object was defined ' - '(setting this\n' - ' appropriately can assist in runtime introspection of ' - 'dynamic class\n' - ' attributes). For callables, it may indicate that an ' - 'instance of the\n' - ' given type (or a subclass) is expected or required as the ' - 'first\n' - ' positional argument (for example, CPython sets this ' - 'attribute for\n' - ' unbound methods that are implemented in C).\n' - '\n' - '\n' - 'Invoking Descriptors\n' - '--------------------\n' - '\n' - 'In general, a descriptor is an object attribute with ' - '“binding\n' - 'behaviorâ€, one whose attribute access has been overridden by ' - 'methods\n' - 'in the descriptor protocol: "__get__()", "__set__()", and\n' - '"__delete__()". If any of those methods are defined for an ' - 'object, it\n' - 'is said to be a descriptor.\n' - '\n' - 'The default behavior for attribute access is to get, set, or ' - 'delete\n' - 'the attribute from an object’s dictionary. For instance, ' - '"a.x" has a\n' - 'lookup chain starting with "a.__dict__[\'x\']", then\n' - '"type(a).__dict__[\'x\']", and continuing through the base ' - 'classes of\n' - '"type(a)" excluding metaclasses.\n' - '\n' - 'However, if the looked-up value is an object defining one of ' - 'the\n' - 'descriptor methods, then Python may override the default ' - 'behavior and\n' - 'invoke the descriptor method instead. Where this occurs in ' - 'the\n' - 'precedence chain depends on which descriptor methods were ' - 'defined and\n' - 'how they were called.\n' - '\n' - 'The starting point for descriptor invocation is a binding, ' - '"a.x". How\n' - 'the arguments are assembled depends on "a":\n' - '\n' - 'Direct Call\n' - ' The simplest and least common call is when user code ' - 'directly\n' - ' invokes a descriptor method: "x.__get__(a)".\n' - '\n' - 'Instance Binding\n' - ' If binding to an object instance, "a.x" is transformed ' - 'into the\n' - ' call: "type(a).__dict__[\'x\'].__get__(a, type(a))".\n' - '\n' - 'Class Binding\n' - ' If binding to a class, "A.x" is transformed into the ' - 'call:\n' - ' "A.__dict__[\'x\'].__get__(None, A)".\n' - '\n' - 'Super Binding\n' - ' A dotted lookup such as "super(A, a).x" searches\n' - ' "a.__class__.__mro__" for a base class "B" following "A" ' - 'and then\n' - ' returns "B.__dict__[\'x\'].__get__(a, A)". If not a ' - 'descriptor, "x"\n' - ' is returned unchanged.\n' - '\n' - 'For instance bindings, the precedence of descriptor ' - 'invocation depends\n' - 'on which descriptor methods are defined. A descriptor can ' - 'define any\n' - 'combination of "__get__()", "__set__()" and "__delete__()". ' - 'If it\n' - 'does not define "__get__()", then accessing the attribute ' - 'will return\n' - 'the descriptor object itself unless there is a value in the ' - 'object’s\n' - 'instance dictionary. If the descriptor defines "__set__()" ' - 'and/or\n' - '"__delete__()", it is a data descriptor; if it defines ' - 'neither, it is\n' - 'a non-data descriptor. 
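A minimal sketch of a data descriptor (it defines both "__get__()" and "__set__()", so it takes precedence over the instance dictionary); the "Positive" and "Account" names are hypothetical:

    class Positive:
        """Data descriptor validating that the stored value is positive."""
        def __set_name__(self, owner, name):
            self._name = '_' + name          # where the value lives on the instance

        def __get__(self, instance, owner=None):
            if instance is None:
                return self                  # accessed on the class itself
            return getattr(instance, self._name)

        def __set__(self, instance, value):
            if value <= 0:
                raise ValueError('must be positive')
            setattr(instance, self._name, value)

    class Account:
        balance = Positive()
        def __init__(self, balance):
            self.balance = balance           # routed through Positive.__set__

    a = Account(10)
    assert a.balance == 10
    a.__dict__['balance'] = -5               # ignored: data descriptors win
    assert a.balance == 10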
Normally, data descriptors define ' - 'both\n' - '"__get__()" and "__set__()", while non-data descriptors have ' - 'just the\n' - '"__get__()" method. Data descriptors with "__get__()" and ' - '"__set__()"\n' - '(and/or "__delete__()") defined always override a ' - 'redefinition in an\n' - 'instance dictionary. In contrast, non-data descriptors can ' - 'be\n' - 'overridden by instances.\n' - '\n' - 'Python methods (including those decorated with ' - '"@staticmethod" and\n' - '"@classmethod") are implemented as non-data descriptors. ' - 'Accordingly,\n' - 'instances can redefine and override methods. This allows ' - 'individual\n' - 'instances to acquire behaviors that differ from other ' - 'instances of the\n' - 'same class.\n' - '\n' - 'The "property()" function is implemented as a data ' - 'descriptor.\n' - 'Accordingly, instances cannot override the behavior of a ' - 'property.\n' - '\n' - '\n' - '__slots__\n' - '---------\n' - '\n' - '*__slots__* allow us to explicitly declare data members ' - '(like\n' - 'properties) and deny the creation of "__dict__" and ' - '*__weakref__*\n' - '(unless explicitly declared in *__slots__* or available in a ' - 'parent.)\n' - '\n' - 'The space saved over using "__dict__" can be significant. ' - 'Attribute\n' - 'lookup speed can be significantly improved as well.\n' - '\n' - 'object.__slots__\n' - '\n' - ' This class variable can be assigned a string, iterable, ' - 'or sequence\n' - ' of strings with variable names used by instances. ' - '*__slots__*\n' - ' reserves space for the declared variables and prevents ' - 'the\n' - ' automatic creation of "__dict__" and *__weakref__* for ' - 'each\n' - ' instance.\n' - '\n' - 'Notes on using *__slots__*:\n' - '\n' - '* When inheriting from a class without *__slots__*, the ' - '"__dict__" and\n' - ' *__weakref__* attribute of the instances will always be ' - 'accessible.\n' - '\n' - '* Without a "__dict__" variable, instances cannot be ' - 'assigned new\n' - ' variables not listed in the *__slots__* definition. ' - 'Attempts to\n' - ' assign to an unlisted variable name raises ' - '"AttributeError". If\n' - ' dynamic assignment of new variables is desired, then add\n' - ' "\'__dict__\'" to the sequence of strings in the ' - '*__slots__*\n' - ' declaration.\n' - '\n' - '* Without a *__weakref__* variable for each instance, ' - 'classes defining\n' - ' *__slots__* do not support "weak references" to its ' - 'instances. If\n' - ' weak reference support is needed, then add ' - '"\'__weakref__\'" to the\n' - ' sequence of strings in the *__slots__* declaration.\n' - '\n' - '* *__slots__* are implemented at the class level by ' - 'creating\n' - ' descriptors for each variable name. As a result, class ' - 'attributes\n' - ' cannot be used to set default values for instance ' - 'variables defined\n' - ' by *__slots__*; otherwise, the class attribute would ' - 'overwrite the\n' - ' descriptor assignment.\n' - '\n' - '* The action of a *__slots__* declaration is not limited to ' - 'the class\n' - ' where it is defined. *__slots__* declared in parents are ' - 'available\n' - ' in child classes. 
However, instances of a child subclass ' - 'will get a\n' - ' "__dict__" and *__weakref__* unless the subclass also ' - 'defines\n' - ' *__slots__* (which should only contain names of any ' - '*additional*\n' - ' slots).\n' - '\n' - '* If a class defines a slot also defined in a base class, ' - 'the instance\n' - ' variable defined by the base class slot is inaccessible ' - '(except by\n' - ' retrieving its descriptor directly from the base class). ' - 'This\n' - ' renders the meaning of the program undefined. In the ' - 'future, a\n' - ' check may be added to prevent this.\n' - '\n' - '* "TypeError" will be raised if nonempty *__slots__* are ' - 'defined for a\n' - ' class derived from a ""variable-length" built-in type" ' - 'such as\n' - ' "int", "bytes", and "tuple".\n' - '\n' - '* Any non-string *iterable* may be assigned to *__slots__*.\n' - '\n' - '* If a "dictionary" is used to assign *__slots__*, the ' - 'dictionary keys\n' - ' will be used as the slot names. The values of the ' - 'dictionary can be\n' - ' used to provide per-attribute docstrings that will be ' - 'recognised by\n' - ' "inspect.getdoc()" and displayed in the output of ' - '"help()".\n' - '\n' - '* "__class__" assignment works only if both classes have the ' - 'same\n' - ' *__slots__*.\n' - '\n' - '* Multiple inheritance with multiple slotted parent classes ' - 'can be\n' - ' used, but only one parent is allowed to have attributes ' - 'created by\n' - ' slots (the other bases must have empty slot layouts) - ' - 'violations\n' - ' raise "TypeError".\n' - '\n' - '* If an *iterator* is used for *__slots__* then a ' - '*descriptor* is\n' - ' created for each of the iterator’s values. However, the ' - '*__slots__*\n' - ' attribute will be an empty iterator.\n' - '\n' - '\n' - 'Customizing class creation\n' - '==========================\n' - '\n' - 'Whenever a class inherits from another class, ' - '"__init_subclass__()" is\n' - 'called on the parent class. This way, it is possible to ' - 'write classes\n' - 'which change the behavior of subclasses. This is closely ' - 'related to\n' - 'class decorators, but where class decorators only affect the ' - 'specific\n' - 'class they’re applied to, "__init_subclass__" solely applies ' - 'to future\n' - 'subclasses of the class defining the method.\n' - '\n' - 'classmethod object.__init_subclass__(cls)\n' - '\n' - ' This method is called whenever the containing class is ' - 'subclassed.\n' - ' *cls* is then the new subclass. If defined as a normal ' - 'instance\n' - ' method, this method is implicitly converted to a class ' - 'method.\n' - '\n' - ' Keyword arguments which are given to a new class are ' - 'passed to the\n' - ' parent class’s "__init_subclass__". For compatibility ' - 'with other\n' - ' classes using "__init_subclass__", one should take out ' - 'the needed\n' - ' keyword arguments and pass the others over to the base ' - 'class, as\n' - ' in:\n' - '\n' - ' class Philosopher:\n' - ' def __init_subclass__(cls, /, default_name, ' - '**kwargs):\n' - ' super().__init_subclass__(**kwargs)\n' - ' cls.default_name = default_name\n' - '\n' - ' class AustralianPhilosopher(Philosopher, ' - 'default_name="Bruce"):\n' - ' pass\n' - '\n' - ' The default implementation "object.__init_subclass__" ' - 'does nothing,\n' - ' but raises an error if it is called with any arguments.\n' - '\n' - ' Note:\n' - '\n' - ' The metaclass hint "metaclass" is consumed by the rest ' - 'of the\n' - ' type machinery, and is never passed to ' - '"__init_subclass__"\n' - ' implementations. 
The actual metaclass (rather than the ' - 'explicit\n' - ' hint) can be accessed as "type(cls)".\n' - '\n' - ' Added in version 3.6.\n' - '\n' - 'When a class is created, "type.__new__()" scans the class ' - 'variables\n' - 'and makes callbacks to those with a "__set_name__()" hook.\n' - '\n' - 'object.__set_name__(self, owner, name)\n' - '\n' - ' Automatically called at the time the owning class *owner* ' - 'is\n' - ' created. The object has been assigned to *name* in that ' - 'class:\n' - '\n' - ' class A:\n' - ' x = C() # Automatically calls: x.__set_name__(A, ' - "'x')\n" - '\n' - ' If the class variable is assigned after the class is ' - 'created,\n' - ' "__set_name__()" will not be called automatically. If ' - 'needed,\n' - ' "__set_name__()" can be called directly:\n' - '\n' - ' class A:\n' - ' pass\n' - '\n' - ' c = C()\n' - ' A.x = c # The hook is not called\n' - " c.__set_name__(A, 'x') # Manually invoke the hook\n" - '\n' - ' See Creating the class object for more details.\n' - '\n' - ' Added in version 3.6.\n' - '\n' - '\n' - 'Metaclasses\n' - '-----------\n' - '\n' - 'By default, classes are constructed using "type()". The ' - 'class body is\n' - 'executed in a new namespace and the class name is bound ' - 'locally to the\n' - 'result of "type(name, bases, namespace)".\n' - '\n' - 'The class creation process can be customized by passing the\n' - '"metaclass" keyword argument in the class definition line, ' - 'or by\n' - 'inheriting from an existing class that included such an ' - 'argument. In\n' - 'the following example, both "MyClass" and "MySubclass" are ' - 'instances\n' - 'of "Meta":\n' - '\n' - ' class Meta(type):\n' - ' pass\n' - '\n' - ' class MyClass(metaclass=Meta):\n' - ' pass\n' - '\n' - ' class MySubclass(MyClass):\n' - ' pass\n' - '\n' - 'Any other keyword arguments that are specified in the class ' - 'definition\n' - 'are passed through to all metaclass operations described ' - 'below.\n' - '\n' - 'When a class definition is executed, the following steps ' - 'occur:\n' - '\n' - '* MRO entries are resolved;\n' - '\n' - '* the appropriate metaclass is determined;\n' - '\n' - '* the class namespace is prepared;\n' - '\n' - '* the class body is executed;\n' - '\n' - '* the class object is created.\n' - '\n' - '\n' - 'Resolving MRO entries\n' - '---------------------\n' - '\n' - 'object.__mro_entries__(self, bases)\n' - '\n' - ' If a base that appears in a class definition is not an ' - 'instance of\n' - ' "type", then an "__mro_entries__()" method is searched on ' - 'the base.\n' - ' If an "__mro_entries__()" method is found, the base is ' - 'substituted\n' - ' with the result of a call to "__mro_entries__()" when ' - 'creating the\n' - ' class. The method is called with the original bases tuple ' - 'passed to\n' - ' the *bases* parameter, and must return a tuple of classes ' - 'that will\n' - ' be used instead of the base. 
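A minimal sketch of "__mro_entries__()" as described above, assuming a hypothetical "Alias" stand-in object that substitutes a real base class at class-creation time:

    class Alias:
        """Not a type itself; tells Python which real base to use instead."""
        def __init__(self, real_base):
            self.real_base = real_base

        def __mro_entries__(self, bases):
            # Called with the original bases tuple; the returned tuple
            # replaces this entry when the class object is created.
            return (self.real_base,)

    class Base:
        pass

    class Derived(Alias(Base)):   # Alias(Base) is not a type, so __mro_entries__ runs
        pass

    assert Derived.__bases__ == (Base,)
    assert Derived.__mro__ == (Derived, Base, object)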
The returned tuple may be ' - 'empty: in\n' - ' these cases, the original base is ignored.\n' - '\n' - 'See also:\n' - '\n' - ' "types.resolve_bases()"\n' - ' Dynamically resolve bases that are not instances of ' - '"type".\n' - '\n' - ' "types.get_original_bases()"\n' - ' Retrieve a class’s “original bases†prior to ' - 'modifications by\n' - ' "__mro_entries__()".\n' - '\n' - ' **PEP 560**\n' - ' Core support for typing module and generic types.\n' - '\n' - '\n' - 'Determining the appropriate metaclass\n' - '-------------------------------------\n' - '\n' - 'The appropriate metaclass for a class definition is ' - 'determined as\n' - 'follows:\n' - '\n' - '* if no bases and no explicit metaclass are given, then ' - '"type()" is\n' - ' used;\n' - '\n' - '* if an explicit metaclass is given and it is *not* an ' - 'instance of\n' - ' "type()", then it is used directly as the metaclass;\n' - '\n' - '* if an instance of "type()" is given as the explicit ' - 'metaclass, or\n' - ' bases are defined, then the most derived metaclass is ' - 'used.\n' - '\n' - 'The most derived metaclass is selected from the explicitly ' - 'specified\n' - 'metaclass (if any) and the metaclasses (i.e. "type(cls)") of ' - 'all\n' - 'specified base classes. The most derived metaclass is one ' - 'which is a\n' - 'subtype of *all* of these candidate metaclasses. If none of ' - 'the\n' - 'candidate metaclasses meets that criterion, then the class ' - 'definition\n' - 'will fail with "TypeError".\n' - '\n' - '\n' - 'Preparing the class namespace\n' - '-----------------------------\n' - '\n' - 'Once the appropriate metaclass has been identified, then the ' - 'class\n' - 'namespace is prepared. If the metaclass has a "__prepare__" ' - 'attribute,\n' - 'it is called as "namespace = metaclass.__prepare__(name, ' - 'bases,\n' - '**kwds)" (where the additional keyword arguments, if any, ' - 'come from\n' - 'the class definition). The "__prepare__" method should be ' - 'implemented\n' - 'as a "classmethod". The namespace returned by "__prepare__" ' - 'is passed\n' - 'in to "__new__", but when the final class object is created ' - 'the\n' - 'namespace is copied into a new "dict".\n' - '\n' - 'If the metaclass has no "__prepare__" attribute, then the ' - 'class\n' - 'namespace is initialised as an empty ordered mapping.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 3115** - Metaclasses in Python 3000\n' - ' Introduced the "__prepare__" namespace hook\n' - '\n' - '\n' - 'Executing the class body\n' - '------------------------\n' - '\n' - 'The class body is executed (approximately) as "exec(body, ' - 'globals(),\n' - 'namespace)". The key difference from a normal call to ' - '"exec()" is that\n' - 'lexical scoping allows the class body (including any ' - 'methods) to\n' - 'reference names from the current and outer scopes when the ' - 'class\n' - 'definition occurs inside a function.\n' - '\n' - 'However, even when the class definition occurs inside the ' - 'function,\n' - 'methods defined inside the class still cannot see names ' - 'defined at the\n' - 'class scope. 
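A short illustration of the point above, with hypothetical names: a method cannot see a class-body name as a bare name and must reach it through the instance, the class, or "__class__":

    class Demo:
        scale = 10

        def bad(self):
            return scale        # NameError when called: class scope is not searched

        def good(self):
            return self.scale   # or Demo.scale, or __class__.scale

    d = Demo()
    assert d.good() == 10
    try:
        d.bad()
    except NameError:
        pass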
Class variables must be accessed through the ' - 'first\n' - 'parameter of instance or class methods, or through the ' - 'implicit\n' - 'lexically scoped "__class__" reference described in the next ' - 'section.\n' - '\n' - '\n' - 'Creating the class object\n' - '-------------------------\n' - '\n' - 'Once the class namespace has been populated by executing the ' - 'class\n' - 'body, the class object is created by calling ' - '"metaclass(name, bases,\n' - 'namespace, **kwds)" (the additional keywords passed here are ' - 'the same\n' - 'as those passed to "__prepare__").\n' - '\n' - 'This class object is the one that will be referenced by the ' - 'zero-\n' - 'argument form of "super()". "__class__" is an implicit ' - 'closure\n' - 'reference created by the compiler if any methods in a class ' - 'body refer\n' - 'to either "__class__" or "super". This allows the zero ' - 'argument form\n' - 'of "super()" to correctly identify the class being defined ' - 'based on\n' - 'lexical scoping, while the class or instance that was used ' - 'to make the\n' - 'current call is identified based on the first argument ' - 'passed to the\n' - 'method.\n' - '\n' - '**CPython implementation detail:** In CPython 3.6 and later, ' - 'the\n' - '"__class__" cell is passed to the metaclass as a ' - '"__classcell__" entry\n' - 'in the class namespace. If present, this must be propagated ' - 'up to the\n' - '"type.__new__" call in order for the class to be ' - 'initialised\n' - 'correctly. Failing to do so will result in a "RuntimeError" ' - 'in Python\n' - '3.8.\n' - '\n' - 'When using the default metaclass "type", or any metaclass ' - 'that\n' - 'ultimately calls "type.__new__", the following additional\n' - 'customization steps are invoked after creating the class ' - 'object:\n' - '\n' - '1. The "type.__new__" method collects all of the attributes ' - 'in the\n' - ' class namespace that define a "__set_name__()" method;\n' - '\n' - '2. Those "__set_name__" methods are called with the class ' - 'being\n' - ' defined and the assigned name of that particular ' - 'attribute;\n' - '\n' - '3. The "__init_subclass__()" hook is called on the immediate ' - 'parent of\n' - ' the new class in its method resolution order.\n' - '\n' - 'After the class object is created, it is passed to the ' - 'class\n' - 'decorators included in the class definition (if any) and the ' - 'resulting\n' - 'object is bound in the local namespace as the defined ' - 'class.\n' - '\n' - 'When a new class is created by "type.__new__", the object ' - 'provided as\n' - 'the namespace parameter is copied to a new ordered mapping ' - 'and the\n' - 'original object is discarded. The new copy is wrapped in a ' - 'read-only\n' - 'proxy, which becomes the "__dict__" attribute of the class ' - 'object.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 3135** - New super\n' - ' Describes the implicit "__class__" closure reference\n' - '\n' - '\n' - 'Uses for metaclasses\n' - '--------------------\n' - '\n' - 'The potential uses for metaclasses are boundless. 
Some ideas ' - 'that have\n' - 'been explored include enum, logging, interface checking, ' - 'automatic\n' - 'delegation, automatic property creation, proxies, ' - 'frameworks, and\n' - 'automatic resource locking/synchronization.\n' - '\n' - '\n' - 'Customizing instance and subclass checks\n' - '========================================\n' - '\n' - 'The following methods are used to override the default ' - 'behavior of the\n' - '"isinstance()" and "issubclass()" built-in functions.\n' - '\n' - 'In particular, the metaclass "abc.ABCMeta" implements these ' - 'methods in\n' - 'order to allow the addition of Abstract Base Classes (ABCs) ' - 'as\n' - '“virtual base classes†to any class or type (including ' - 'built-in\n' - 'types), including other ABCs.\n' - '\n' - 'type.__instancecheck__(self, instance)\n' - '\n' - ' Return true if *instance* should be considered a (direct ' - 'or\n' - ' indirect) instance of *class*. If defined, called to ' - 'implement\n' - ' "isinstance(instance, class)".\n' - '\n' - 'type.__subclasscheck__(self, subclass)\n' - '\n' - ' Return true if *subclass* should be considered a (direct ' - 'or\n' - ' indirect) subclass of *class*. If defined, called to ' - 'implement\n' - ' "issubclass(subclass, class)".\n' - '\n' - 'Note that these methods are looked up on the type ' - '(metaclass) of a\n' - 'class. They cannot be defined as class methods in the ' - 'actual class.\n' - 'This is consistent with the lookup of special methods that ' - 'are called\n' - 'on instances, only in this case the instance is itself a ' - 'class.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 3119** - Introducing Abstract Base Classes\n' - ' Includes the specification for customizing ' - '"isinstance()" and\n' - ' "issubclass()" behavior through "__instancecheck__()" ' - 'and\n' - ' "__subclasscheck__()", with motivation for this ' - 'functionality in\n' - ' the context of adding Abstract Base Classes (see the ' - '"abc"\n' - ' module) to the language.\n' - '\n' - '\n' - 'Emulating generic types\n' - '=======================\n' - '\n' - 'When using *type annotations*, it is often useful to ' - '*parameterize* a\n' - '*generic type* using Python’s square-brackets notation. For ' - 'example,\n' - 'the annotation "list[int]" might be used to signify a "list" ' - 'in which\n' - 'all the elements are of type "int".\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 484** - Type Hints\n' - ' Introducing Python’s framework for type annotations\n' - '\n' - ' Generic Alias Types\n' - ' Documentation for objects representing parameterized ' - 'generic\n' - ' classes\n' - '\n' - ' Generics, user-defined generics and "typing.Generic"\n' - ' Documentation on how to implement generic classes that ' - 'can be\n' - ' parameterized at runtime and understood by static ' - 'type-checkers.\n' - '\n' - 'A class can *generally* only be parameterized if it defines ' - 'the\n' - 'special class method "__class_getitem__()".\n' - '\n' - 'classmethod object.__class_getitem__(cls, key)\n' - '\n' - ' Return an object representing the specialization of a ' - 'generic class\n' - ' by type arguments found in *key*.\n' - '\n' - ' When defined on a class, "__class_getitem__()" is ' - 'automatically a\n' - ' class method. 
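A minimal sketch of a user-defined "__class_getitem__()", assuming a hypothetical "Bag" class that returns a "types.GenericAlias" so it can be parameterized in annotations:

    from types import GenericAlias

    class Bag:
        def __class_getitem__(cls, item):
            # Implicitly a classmethod; return a GenericAlias so that
            # "Bag[int]" works as a type hint.
            return GenericAlias(cls, item)

    alias = Bag[int]                          # calls Bag.__class_getitem__(int)
    assert isinstance(alias, GenericAlias)
    assert alias.__origin__ is Bag and alias.__args__ == (int,)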
As such, there is no need for it to be ' - 'decorated with\n' - ' "@classmethod" when it is defined.\n' - '\n' - '\n' - 'The purpose of *__class_getitem__*\n' - '----------------------------------\n' - '\n' - 'The purpose of "__class_getitem__()" is to allow runtime\n' - 'parameterization of standard-library generic classes in ' - 'order to more\n' - 'easily apply *type hints* to these classes.\n' - '\n' - 'To implement custom generic classes that can be ' - 'parameterized at\n' - 'runtime and understood by static type-checkers, users should ' - 'either\n' - 'inherit from a standard library class that already ' - 'implements\n' - '"__class_getitem__()", or inherit from "typing.Generic", ' - 'which has its\n' - 'own implementation of "__class_getitem__()".\n' - '\n' - 'Custom implementations of "__class_getitem__()" on classes ' - 'defined\n' - 'outside of the standard library may not be understood by ' - 'third-party\n' - 'type-checkers such as mypy. Using "__class_getitem__()" on ' - 'any class\n' - 'for purposes other than type hinting is discouraged.\n' - '\n' - '\n' - '*__class_getitem__* versus *__getitem__*\n' - '----------------------------------------\n' - '\n' - 'Usually, the subscription of an object using square brackets ' - 'will call\n' - 'the "__getitem__()" instance method defined on the object’s ' - 'class.\n' - 'However, if the object being subscribed is itself a class, ' - 'the class\n' - 'method "__class_getitem__()" may be called instead.\n' - '"__class_getitem__()" should return a GenericAlias object if ' - 'it is\n' - 'properly defined.\n' - '\n' - 'Presented with the *expression* "obj[x]", the Python ' - 'interpreter\n' - 'follows something like the following process to decide ' - 'whether\n' - '"__getitem__()" or "__class_getitem__()" should be called:\n' - '\n' - ' from inspect import isclass\n' - '\n' - ' def subscribe(obj, x):\n' - ' """Return the result of the expression \'obj[x]\'"""\n' - '\n' - ' class_of_obj = type(obj)\n' - '\n' - ' # If the class of obj defines __getitem__,\n' - ' # call class_of_obj.__getitem__(obj, x)\n' - " if hasattr(class_of_obj, '__getitem__'):\n" - ' return class_of_obj.__getitem__(obj, x)\n' - '\n' - ' # Else, if obj is a class and defines ' - '__class_getitem__,\n' - ' # call obj.__class_getitem__(x)\n' - ' elif isclass(obj) and hasattr(obj, ' - "'__class_getitem__'):\n" - ' return obj.__class_getitem__(x)\n' - '\n' - ' # Else, raise an exception\n' - ' else:\n' - ' raise TypeError(\n' - ' f"\'{class_of_obj.__name__}\' object is not ' - 'subscriptable"\n' - ' )\n' - '\n' - 'In Python, all classes are themselves instances of other ' - 'classes. The\n' - 'class of a class is known as that class’s *metaclass*, and ' - 'most\n' - 'classes have the "type" class as their metaclass. 
"type" ' - 'does not\n' - 'define "__getitem__()", meaning that expressions such as ' - '"list[int]",\n' - '"dict[str, float]" and "tuple[str, bytes]" all result in\n' - '"__class_getitem__()" being called:\n' - '\n' - ' >>> # list has class "type" as its metaclass, like most ' - 'classes:\n' - ' >>> type(list)\n' - " \n" - ' >>> type(dict) == type(list) == type(tuple) == type(str) ' - '== type(bytes)\n' - ' True\n' - ' >>> # "list[int]" calls "list.__class_getitem__(int)"\n' - ' >>> list[int]\n' - ' list[int]\n' - ' >>> # list.__class_getitem__ returns a GenericAlias ' - 'object:\n' - ' >>> type(list[int])\n' - " \n" - '\n' - 'However, if a class has a custom metaclass that defines\n' - '"__getitem__()", subscribing the class may result in ' - 'different\n' - 'behaviour. An example of this can be found in the "enum" ' - 'module:\n' - '\n' - ' >>> from enum import Enum\n' - ' >>> class Menu(Enum):\n' - ' ... """A breakfast menu"""\n' - " ... SPAM = 'spam'\n" - " ... BACON = 'bacon'\n" - ' ...\n' - ' >>> # Enum classes have a custom metaclass:\n' - ' >>> type(Menu)\n' - " \n" - ' >>> # EnumMeta defines __getitem__,\n' - ' >>> # so __class_getitem__ is not called,\n' - ' >>> # and the result is not a GenericAlias object:\n' - " >>> Menu['SPAM']\n" - " \n" - " >>> type(Menu['SPAM'])\n" - " \n" - '\n' - 'See also:\n' - '\n' - ' **PEP 560** - Core Support for typing module and generic ' - 'types\n' - ' Introducing "__class_getitem__()", and outlining when ' - 'a\n' - ' subscription results in "__class_getitem__()" being ' - 'called\n' - ' instead of "__getitem__()"\n' - '\n' - '\n' - 'Emulating callable objects\n' - '==========================\n' - '\n' - 'object.__call__(self[, args...])\n' - '\n' - ' Called when the instance is “called†as a function; if ' - 'this method\n' - ' is defined, "x(arg1, arg2, ...)" roughly translates to\n' - ' "type(x).__call__(x, arg1, ...)". The "object" class ' - 'itself does\n' - ' not provide this method.\n' - '\n' - '\n' - 'Emulating container types\n' - '=========================\n' - '\n' - 'The following methods can be defined to implement container ' - 'objects.\n' - 'None of them are provided by the "object" class itself. ' - 'Containers\n' - 'usually are *sequences* (such as "lists" or "tuples") or ' - '*mappings*\n' - '(like *dictionaries*), but can represent other containers as ' - 'well.\n' - 'The first set of methods is used either to emulate a ' - 'sequence or to\n' - 'emulate a mapping; the difference is that for a sequence, ' - 'the\n' - 'allowable keys should be the integers *k* for which "0 <= k ' - '< N" where\n' - '*N* is the length of the sequence, or "slice" objects, which ' - 'define a\n' - 'range of items. It is also recommended that mappings ' - 'provide the\n' - 'methods "keys()", "values()", "items()", "get()", ' - '"clear()",\n' - '"setdefault()", "pop()", "popitem()", "copy()", and ' - '"update()"\n' - 'behaving similar to those for Python’s standard "dictionary" ' - 'objects.\n' - 'The "collections.abc" module provides a "MutableMapping" ' - '*abstract\n' - 'base class* to help create those methods from a base set of\n' - '"__getitem__()", "__setitem__()", "__delitem__()", and ' - '"keys()".\n' - 'Mutable sequences should provide methods "append()", ' - '"count()",\n' - '"index()", "extend()", "insert()", "pop()", "remove()", ' - '"reverse()"\n' - 'and "sort()", like Python standard "list" objects. 
Finally, ' - 'sequence\n' - 'types should implement addition (meaning concatenation) and\n' - 'multiplication (meaning repetition) by defining the methods\n' - '"__add__()", "__radd__()", "__iadd__()", "__mul__()", ' - '"__rmul__()" and\n' - '"__imul__()" described below; they should not define other ' - 'numerical\n' - 'operators. It is recommended that both mappings and ' - 'sequences\n' - 'implement the "__contains__()" method to allow efficient use ' - 'of the\n' - '"in" operator; for mappings, "in" should search the ' - 'mapping’s keys;\n' - 'for sequences, it should search through the values. It is ' - 'further\n' - 'recommended that both mappings and sequences implement the\n' - '"__iter__()" method to allow efficient iteration through ' - 'the\n' - 'container; for mappings, "__iter__()" should iterate through ' - 'the\n' - 'object’s keys; for sequences, it should iterate through the ' - 'values.\n' - '\n' - 'object.__len__(self)\n' - '\n' - ' Called to implement the built-in function "len()". ' - 'Should return\n' - ' the length of the object, an integer ">=" 0. Also, an ' - 'object that\n' - ' doesn’t define a "__bool__()" method and whose ' - '"__len__()" method\n' - ' returns zero is considered to be false in a Boolean ' - 'context.\n' - '\n' - ' **CPython implementation detail:** In CPython, the length ' - 'is\n' - ' required to be at most "sys.maxsize". If the length is ' - 'larger than\n' - ' "sys.maxsize" some features (such as "len()") may raise\n' - ' "OverflowError". To prevent raising "OverflowError" by ' - 'truth value\n' - ' testing, an object must define a "__bool__()" method.\n' - '\n' - 'object.__length_hint__(self)\n' - '\n' - ' Called to implement "operator.length_hint()". Should ' - 'return an\n' - ' estimated length for the object (which may be greater or ' - 'less than\n' - ' the actual length). The length must be an integer ">=" 0. ' - 'The\n' - ' return value may also be "NotImplemented", which is ' - 'treated the\n' - ' same as if the "__length_hint__" method didn’t exist at ' - 'all. This\n' - ' method is purely an optimization and is never required ' - 'for\n' - ' correctness.\n' - '\n' - ' Added in version 3.4.\n' - '\n' - 'Note:\n' - '\n' - ' Slicing is done exclusively with the following three ' - 'methods. A\n' - ' call like\n' - '\n' - ' a[1:2] = b\n' - '\n' - ' is translated to\n' - '\n' - ' a[slice(1, 2, None)] = b\n' - '\n' - ' and so forth. Missing slice items are always filled in ' - 'with "None".\n' - '\n' - 'object.__getitem__(self, key)\n' - '\n' - ' Called to implement evaluation of "self[key]". For ' - '*sequence*\n' - ' types, the accepted keys should be integers. Optionally, ' - 'they may\n' - ' support "slice" objects as well. Negative index support ' - 'is also\n' - ' optional. If *key* is of an inappropriate type, ' - '"TypeError" may be\n' - ' raised; if *key* is a value outside the set of indexes ' - 'for the\n' - ' sequence (after any special interpretation of negative ' - 'values),\n' - ' "IndexError" should be raised. 
For *mapping* types, if ' - '*key* is\n' - ' missing (not in the container), "KeyError" should be ' - 'raised.\n' - '\n' - ' Note:\n' - '\n' - ' "for" loops expect that an "IndexError" will be raised ' - 'for\n' - ' illegal indexes to allow proper detection of the end of ' - 'the\n' - ' sequence.\n' - '\n' - ' Note:\n' - '\n' - ' When subscripting a *class*, the special class method\n' - ' "__class_getitem__()" may be called instead of ' - '"__getitem__()".\n' - ' See __class_getitem__ versus __getitem__ for more ' - 'details.\n' - '\n' - 'object.__setitem__(self, key, value)\n' - '\n' - ' Called to implement assignment to "self[key]". Same note ' - 'as for\n' - ' "__getitem__()". This should only be implemented for ' - 'mappings if\n' - ' the objects support changes to the values for keys, or if ' - 'new keys\n' - ' can be added, or for sequences if elements can be ' - 'replaced. The\n' - ' same exceptions should be raised for improper *key* ' - 'values as for\n' - ' the "__getitem__()" method.\n' - '\n' - 'object.__delitem__(self, key)\n' - '\n' - ' Called to implement deletion of "self[key]". Same note ' - 'as for\n' - ' "__getitem__()". This should only be implemented for ' - 'mappings if\n' - ' the objects support removal of keys, or for sequences if ' - 'elements\n' - ' can be removed from the sequence. The same exceptions ' - 'should be\n' - ' raised for improper *key* values as for the ' - '"__getitem__()" method.\n' - '\n' - 'object.__missing__(self, key)\n' - '\n' - ' Called by "dict"."__getitem__()" to implement "self[key]" ' - 'for dict\n' - ' subclasses when key is not in the dictionary.\n' - '\n' - 'object.__iter__(self)\n' - '\n' - ' This method is called when an *iterator* is required for ' - 'a\n' - ' container. This method should return a new iterator ' - 'object that can\n' - ' iterate over all the objects in the container. For ' - 'mappings, it\n' - ' should iterate over the keys of the container.\n' - '\n' - 'object.__reversed__(self)\n' - '\n' - ' Called (if present) by the "reversed()" built-in to ' - 'implement\n' - ' reverse iteration. It should return a new iterator ' - 'object that\n' - ' iterates over all the objects in the container in reverse ' - 'order.\n' - '\n' - ' If the "__reversed__()" method is not provided, the ' - '"reversed()"\n' - ' built-in will fall back to using the sequence protocol ' - '("__len__()"\n' - ' and "__getitem__()"). Objects that support the sequence ' - 'protocol\n' - ' should only provide "__reversed__()" if they can provide ' - 'an\n' - ' implementation that is more efficient than the one ' - 'provided by\n' - ' "reversed()".\n' - '\n' - 'The membership test operators ("in" and "not in") are ' - 'normally\n' - 'implemented as an iteration through a container. However, ' - 'container\n' - 'objects can supply the following special method with a more ' - 'efficient\n' - 'implementation, which also does not require the object be ' - 'iterable.\n' - '\n' - 'object.__contains__(self, item)\n' - '\n' - ' Called to implement membership test operators. Should ' - 'return true\n' - ' if *item* is in *self*, false otherwise. 
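A minimal sketch of a read-only sequence implementing the container methods described above; the "Deck" class and its contents are hypothetical:

    class Deck:
        """A minimal read-only sequence wrapping a list."""
        def __init__(self, items):
            self._items = list(items)

        def __len__(self):
            return len(self._items)

        def __getitem__(self, index):
            # The wrapped list handles ints, negative indexes and slices,
            # and raises IndexError/TypeError as appropriate.
            return self._items[index]

        def __iter__(self):
            return iter(self._items)

        def __contains__(self, item):
            return item in self._items

    d = Deck(['ace', 'king', 'queen'])
    assert len(d) == 3
    assert d[0] == 'ace' and d[-1] == 'queen'
    assert 'king' in d
    assert list(reversed(d)) == ['queen', 'king', 'ace']   # sequence-protocol fallback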
For mapping ' - 'objects, this\n' - ' should consider the keys of the mapping rather than the ' - 'values or\n' - ' the key-item pairs.\n' - '\n' - ' For objects that don’t define "__contains__()", the ' - 'membership test\n' - ' first tries iteration via "__iter__()", then the old ' - 'sequence\n' - ' iteration protocol via "__getitem__()", see this section ' - 'in the\n' - ' language reference.\n' - '\n' - '\n' - 'Emulating numeric types\n' - '=======================\n' - '\n' - 'The following methods can be defined to emulate numeric ' - 'objects.\n' - 'Methods corresponding to operations that are not supported ' - 'by the\n' - 'particular kind of number implemented (e.g., bitwise ' - 'operations for\n' - 'non-integral numbers) should be left undefined.\n' - '\n' - 'object.__add__(self, other)\n' - 'object.__sub__(self, other)\n' - 'object.__mul__(self, other)\n' - 'object.__matmul__(self, other)\n' - 'object.__truediv__(self, other)\n' - 'object.__floordiv__(self, other)\n' - 'object.__mod__(self, other)\n' - 'object.__divmod__(self, other)\n' - 'object.__pow__(self, other[, modulo])\n' - 'object.__lshift__(self, other)\n' - 'object.__rshift__(self, other)\n' - 'object.__and__(self, other)\n' - 'object.__xor__(self, other)\n' - 'object.__or__(self, other)\n' - '\n' - ' These methods are called to implement the binary ' - 'arithmetic\n' - ' operations ("+", "-", "*", "@", "/", "//", "%", ' - '"divmod()",\n' - ' "pow()", "**", "<<", ">>", "&", "^", "|"). For instance, ' - 'to\n' - ' evaluate the expression "x + y", where *x* is an instance ' - 'of a\n' - ' class that has an "__add__()" method, "type(x).__add__(x, ' - 'y)" is\n' - ' called. The "__divmod__()" method should be the ' - 'equivalent to\n' - ' using "__floordiv__()" and "__mod__()"; it should not be ' - 'related to\n' - ' "__truediv__()". Note that "__pow__()" should be defined ' - 'to accept\n' - ' an optional third argument if the ternary version of the ' - 'built-in\n' - ' "pow()" function is to be supported.\n' - '\n' - ' If one of those methods does not support the operation ' - 'with the\n' - ' supplied arguments, it should return "NotImplemented".\n' - '\n' - 'object.__radd__(self, other)\n' - 'object.__rsub__(self, other)\n' - 'object.__rmul__(self, other)\n' - 'object.__rmatmul__(self, other)\n' - 'object.__rtruediv__(self, other)\n' - 'object.__rfloordiv__(self, other)\n' - 'object.__rmod__(self, other)\n' - 'object.__rdivmod__(self, other)\n' - 'object.__rpow__(self, other[, modulo])\n' - 'object.__rlshift__(self, other)\n' - 'object.__rrshift__(self, other)\n' - 'object.__rand__(self, other)\n' - 'object.__rxor__(self, other)\n' - 'object.__ror__(self, other)\n' - '\n' - ' These methods are called to implement the binary ' - 'arithmetic\n' - ' operations ("+", "-", "*", "@", "/", "//", "%", ' - '"divmod()",\n' - ' "pow()", "**", "<<", ">>", "&", "^", "|") with reflected ' - '(swapped)\n' - ' operands. These functions are only called if the ' - 'operands are of\n' - ' different types, when the left operand does not support ' - 'the\n' - ' corresponding operation [3], or the right operand’s class ' - 'is\n' - ' derived from the left operand’s class. [4] For instance, ' - 'to\n' - ' evaluate the expression "x - y", where *y* is an instance ' - 'of a\n' - ' class that has an "__rsub__()" method, ' - '"type(y).__rsub__(y, x)" is\n' - ' called if "type(x).__sub__(x, y)" returns ' - '"NotImplemented" or\n' - ' "type(y)" is a subclass of "type(x)". 
[5]\n' - '\n' - ' Note that ternary "pow()" will not try calling ' - '"__rpow__()" (the\n' - ' coercion rules would become too complicated).\n' - '\n' - ' Note:\n' - '\n' - ' If the right operand’s type is a subclass of the left ' - 'operand’s\n' - ' type and that subclass provides a different ' - 'implementation of the\n' - ' reflected method for the operation, this method will be ' - 'called\n' - ' before the left operand’s non-reflected method. This ' - 'behavior\n' - ' allows subclasses to override their ancestors’ ' - 'operations.\n' - '\n' - 'object.__iadd__(self, other)\n' - 'object.__isub__(self, other)\n' - 'object.__imul__(self, other)\n' - 'object.__imatmul__(self, other)\n' - 'object.__itruediv__(self, other)\n' - 'object.__ifloordiv__(self, other)\n' - 'object.__imod__(self, other)\n' - 'object.__ipow__(self, other[, modulo])\n' - 'object.__ilshift__(self, other)\n' - 'object.__irshift__(self, other)\n' - 'object.__iand__(self, other)\n' - 'object.__ixor__(self, other)\n' - 'object.__ior__(self, other)\n' - '\n' - ' These methods are called to implement the augmented ' - 'arithmetic\n' - ' assignments ("+=", "-=", "*=", "@=", "/=", "//=", "%=", ' - '"**=",\n' - ' "<<=", ">>=", "&=", "^=", "|="). These methods should ' - 'attempt to\n' - ' do the operation in-place (modifying *self*) and return ' - 'the result\n' - ' (which could be, but does not have to be, *self*). If a ' - 'specific\n' - ' method is not defined, or if that method returns ' - '"NotImplemented",\n' - ' the augmented assignment falls back to the normal ' - 'methods. For\n' - ' instance, if *x* is an instance of a class with an ' - '"__iadd__()"\n' - ' method, "x += y" is equivalent to "x = x.__iadd__(y)" . ' - 'If\n' - ' "__iadd__()" does not exist, or if "x.__iadd__(y)" ' - 'returns\n' - ' "NotImplemented", "x.__add__(y)" and "y.__radd__(x)" are\n' - ' considered, as with the evaluation of "x + y". In ' - 'certain\n' - ' situations, augmented assignment can result in unexpected ' - 'errors\n' - ' (see Why does a_tuple[i] += [‘item’] raise an exception ' - 'when the\n' - ' addition works?), but this behavior is in fact part of ' - 'the data\n' - ' model.\n' - '\n' - 'object.__neg__(self)\n' - 'object.__pos__(self)\n' - 'object.__abs__(self)\n' - 'object.__invert__(self)\n' - '\n' - ' Called to implement the unary arithmetic operations ("-", ' - '"+",\n' - ' "abs()" and "~").\n' - '\n' - 'object.__complex__(self)\n' - 'object.__int__(self)\n' - 'object.__float__(self)\n' - '\n' - ' Called to implement the built-in functions "complex()", ' - '"int()" and\n' - ' "float()". Should return a value of the appropriate ' - 'type.\n' - '\n' - 'object.__index__(self)\n' - '\n' - ' Called to implement "operator.index()", and whenever ' - 'Python needs\n' - ' to losslessly convert the numeric object to an integer ' - 'object (such\n' - ' as in slicing, or in the built-in "bin()", "hex()" and ' - '"oct()"\n' - ' functions). Presence of this method indicates that the ' - 'numeric\n' - ' object is an integer type. Must return an integer.\n' - '\n' - ' If "__int__()", "__float__()" and "__complex__()" are not ' - 'defined\n' - ' then corresponding built-in functions "int()", "float()" ' - 'and\n' - ' "complex()" fall back to "__index__()".\n' - '\n' - 'object.__round__(self[, ndigits])\n' - 'object.__trunc__(self)\n' - 'object.__floor__(self)\n' - 'object.__ceil__(self)\n' - '\n' - ' Called to implement the built-in function "round()" and ' - '"math"\n' - ' functions "trunc()", "floor()" and "ceil()". 
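A minimal sketch of the binary and reflected arithmetic protocol described above, returning "NotImplemented" when the operand type is not handled; the "Metres" class is hypothetical:

    class Metres:
        def __init__(self, value):
            self.value = value

        def __add__(self, other):
            if isinstance(other, Metres):
                return Metres(self.value + other.value)
            if isinstance(other, (int, float)):
                return Metres(self.value + other)
            return NotImplemented           # let Python try the other operand

        __radd__ = __add__                  # handles "3 + Metres(2)"

    assert (Metres(2) + 3).value == 5
    assert (3 + Metres(2)).value == 5       # int.__add__ returned NotImplemented first
    assert (Metres(2) + Metres(3)).value == 5
    try:
        Metres(2) + 'x'
    except TypeError:
        pass                                # both sides returned NotImplemented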
Unless ' - '*ndigits* is\n' - ' passed to "__round__()" all these methods should return ' - 'the value\n' - ' of the object truncated to an "Integral" (typically an ' - '"int").\n' - '\n' - ' Changed in version 3.14: "int()" no longer delegates to ' - 'the\n' - ' "__trunc__()" method.\n' - '\n' - '\n' - 'With Statement Context Managers\n' - '===============================\n' - '\n' - 'A *context manager* is an object that defines the runtime ' - 'context to\n' - 'be established when executing a "with" statement. The ' - 'context manager\n' - 'handles the entry into, and the exit from, the desired ' - 'runtime context\n' - 'for the execution of the block of code. Context managers ' - 'are normally\n' - 'invoked using the "with" statement (described in section The ' - 'with\n' - 'statement), but can also be used by directly invoking their ' - 'methods.\n' - '\n' - 'Typical uses of context managers include saving and ' - 'restoring various\n' - 'kinds of global state, locking and unlocking resources, ' - 'closing opened\n' - 'files, etc.\n' - '\n' - 'For more information on context managers, see Context ' - 'Manager Types.\n' - 'The "object" class itself does not provide the context ' - 'manager\n' - 'methods.\n' - '\n' - 'object.__enter__(self)\n' - '\n' - ' Enter the runtime context related to this object. The ' - '"with"\n' - ' statement will bind this method’s return value to the ' - 'target(s)\n' - ' specified in the "as" clause of the statement, if any.\n' - '\n' - 'object.__exit__(self, exc_type, exc_value, traceback)\n' - '\n' - ' Exit the runtime context related to this object. The ' - 'parameters\n' - ' describe the exception that caused the context to be ' - 'exited. If the\n' - ' context was exited without an exception, all three ' - 'arguments will\n' - ' be "None".\n' - '\n' - ' If an exception is supplied, and the method wishes to ' - 'suppress the\n' - ' exception (i.e., prevent it from being propagated), it ' - 'should\n' - ' return a true value. Otherwise, the exception will be ' - 'processed\n' - ' normally upon exit from this method.\n' - '\n' - ' Note that "__exit__()" methods should not reraise the ' - 'passed-in\n' - ' exception; this is the caller’s responsibility.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 343** - The “with†statement\n' - ' The specification, background, and examples for the ' - 'Python "with"\n' - ' statement.\n' - '\n' - '\n' - 'Customizing positional arguments in class pattern matching\n' - '==========================================================\n' - '\n' - 'When using a class name in a pattern, positional arguments ' - 'in the\n' - 'pattern are not allowed by default, i.e. "case MyClass(x, ' - 'y)" is\n' - 'typically invalid without special support in "MyClass". To ' - 'be able to\n' - 'use that kind of pattern, the class needs to define a ' - '*__match_args__*\n' - 'attribute.\n' - '\n' - 'object.__match_args__\n' - '\n' - ' This class variable can be assigned a tuple of strings. ' - 'When this\n' - ' class is used in a class pattern with positional ' - 'arguments, each\n' - ' positional argument will be converted into a keyword ' - 'argument,\n' - ' using the corresponding value in *__match_args__* as the ' - 'keyword.\n' - ' The absence of this attribute is equivalent to setting it ' - 'to "()".\n' - '\n' - 'For example, if "MyClass.__match_args__" is "("left", ' - '"center",\n' - '"right")" that means that "case MyClass(x, y)" is equivalent ' - 'to "case\n' - 'MyClass(left=x, center=y)". 
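A minimal sketch of the equivalence just described (Python 3.10 or later; the "__init__" shown here is hypothetical):

    class MyClass:
        __match_args__ = ('left', 'center', 'right')

        def __init__(self, left, center, right):
            self.left, self.center, self.right = left, center, right

    match MyClass(1, 2, 3):
        case MyClass(x, y):          # same as: case MyClass(left=x, center=y)
            assert (x, y) == (1, 2)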
Note that the number of ' - 'arguments in the\n' - 'pattern must be smaller than or equal to the number of ' - 'elements in\n' - '*__match_args__*; if it is larger, the pattern match attempt ' - 'will\n' - 'raise a "TypeError".\n' - '\n' - 'Added in version 3.10.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 634** - Structural Pattern Matching\n' - ' The specification for the Python "match" statement.\n' - '\n' - '\n' - 'Emulating buffer types\n' - '======================\n' - '\n' - 'The buffer protocol provides a way for Python objects to ' - 'expose\n' - 'efficient access to a low-level memory array. This protocol ' - 'is\n' - 'implemented by builtin types such as "bytes" and ' - '"memoryview", and\n' - 'third-party libraries may define additional buffer types.\n' - '\n' - 'While buffer types are usually implemented in C, it is also ' - 'possible\n' - 'to implement the protocol in Python.\n' - '\n' - 'object.__buffer__(self, flags)\n' - '\n' - ' Called when a buffer is requested from *self* (for ' - 'example, by the\n' - ' "memoryview" constructor). The *flags* argument is an ' - 'integer\n' - ' representing the kind of buffer requested, affecting for ' - 'example\n' - ' whether the returned buffer is read-only or writable.\n' - ' "inspect.BufferFlags" provides a convenient way to ' - 'interpret the\n' - ' flags. The method must return a "memoryview" object.\n' - '\n' - 'object.__release_buffer__(self, buffer)\n' - '\n' - ' Called when a buffer is no longer needed. The *buffer* ' - 'argument is\n' - ' a "memoryview" object that was previously returned by\n' - ' "__buffer__()". The method must release any resources ' - 'associated\n' - ' with the buffer. This method should return "None". Buffer ' - 'objects\n' - ' that do not need to perform any cleanup are not required ' - 'to\n' - ' implement this method.\n' - '\n' - 'Added in version 3.12.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 688** - Making the buffer protocol accessible in ' - 'Python\n' - ' Introduces the Python "__buffer__" and ' - '"__release_buffer__"\n' - ' methods.\n' - '\n' - ' "collections.abc.Buffer"\n' - ' ABC for buffer types.\n' - '\n' - '\n' - 'Annotations\n' - '===========\n' - '\n' - 'Functions, classes, and modules may contain *annotations*, ' - 'which are a\n' - 'way to associate information (usually *type hints*) with a ' - 'symbol.\n' - '\n' - 'object.__annotations__\n' - '\n' - ' This attribute contains the annotations for an object. It ' - 'is lazily\n' - ' evaluated, so accessing the attribute may execute ' - 'arbitrary code\n' - ' and raise exceptions. If evaluation is successful, the ' - 'attribute is\n' - ' set to a dictionary mapping from variable names to ' - 'annotations.\n' - '\n' - ' Changed in version 3.14: Annotations are now lazily ' - 'evaluated.\n' - '\n' - 'object.__annotate__(format)\n' - '\n' - ' An *annotate function*. Returns a new dictionary object ' - 'mapping\n' - ' attribute/parameter names to their annotation values.\n' - '\n' - ' Takes a format parameter specifying the format in which ' - 'annotations\n' - ' values should be provided. It must be a member of the\n' - ' "annotationlib.Format" enum, or an integer with a value\n' - ' corresponding to a member of the enum.\n' - '\n' - ' If an annotate function doesn’t support the requested ' - 'format, it\n' - ' must raise "NotImplementedError". 
Annotate functions must ' - 'always\n' - ' support "VALUE" format; they must not raise ' - '"NotImplementedError()"\n' - ' when called with this format.\n' - '\n' - ' When called with "VALUE" format, an annotate function ' - 'may raise\n' - ' "NameError"; it must not raise "NameError" when called ' - 'requesting\n' - ' any other format.\n' - '\n' - ' If an object does not have any annotations, ' - '"__annotate__" should\n' - ' preferably be set to "None" (it can’t be deleted), rather ' - 'than set\n' - ' to a function that returns an empty dict.\n' - '\n' - ' Added in version 3.14.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 649** — Deferred evaluation of annotation using ' - 'descriptors\n' - ' Introduces lazy evaluation of annotations and the ' - '"__annotate__"\n' - ' function.\n' - '\n' - '\n' - 'Special method lookup\n' - '=====================\n' - '\n' - 'For custom classes, implicit invocations of special methods ' - 'are only\n' - 'guaranteed to work correctly if defined on an object’s type, ' - 'not in\n' - 'the object’s instance dictionary. That behaviour is the ' - 'reason why\n' - 'the following code raises an exception:\n' - '\n' - ' >>> class C:\n' - ' ... pass\n' - ' ...\n' - ' >>> c = C()\n' - ' >>> c.__len__ = lambda: 5\n' - ' >>> len(c)\n' - ' Traceback (most recent call last):\n' - ' File "", line 1, in \n' - " TypeError: object of type 'C' has no len()\n" - '\n' - 'The rationale behind this behaviour lies with a number of ' - 'special\n' - 'methods such as "__hash__()" and "__repr__()" that are ' - 'implemented by\n' - 'all objects, including type objects. If the implicit lookup ' - 'of these\n' - 'methods used the conventional lookup process, they would ' - 'fail when\n' - 'invoked on the type object itself:\n' - '\n' - ' >>> 1 .__hash__() == hash(1)\n' - ' True\n' - ' >>> int.__hash__() == hash(int)\n' - ' Traceback (most recent call last):\n' - ' File "", line 1, in \n' - " TypeError: descriptor '__hash__' of 'int' object needs an " - 'argument\n' - '\n' - 'Incorrectly attempting to invoke an unbound method of a ' - 'class in this\n' - 'way is sometimes referred to as ‘metaclass confusion’, and ' - 'is avoided\n' - 'by bypassing the instance when looking up special methods:\n' - '\n' - ' >>> type(1).__hash__(1) == hash(1)\n' - ' True\n' - ' >>> type(int).__hash__(int) == hash(int)\n' - ' True\n' - '\n' - 'In addition to bypassing any instance attributes in the ' - 'interest of\n' - 'correctness, implicit special method lookup generally also ' - 'bypasses\n' - 'the "__getattribute__()" method even of the object’s ' - 'metaclass:\n' - '\n' - ' >>> class Meta(type):\n' - ' ... def __getattribute__(*args):\n' - ' ... print("Metaclass getattribute invoked")\n' - ' ... return type.__getattribute__(*args)\n' - ' ...\n' - ' >>> class C(object, metaclass=Meta):\n' - ' ... def __len__(self):\n' - ' ... return 10\n' - ' ... def __getattribute__(*args):\n' - ' ... print("Class getattribute invoked")\n' - ' ... 
return object.__getattribute__(*args)\n' - ' ...\n' - ' >>> c = C()\n' - ' >>> c.__len__() # Explicit lookup via ' - 'instance\n' - ' Class getattribute invoked\n' - ' 10\n' - ' >>> type(c).__len__(c) # Explicit lookup via ' - 'type\n' - ' Metaclass getattribute invoked\n' - ' 10\n' - ' >>> len(c) # Implicit lookup\n' - ' 10\n' - '\n' - 'Bypassing the "__getattribute__()" machinery in this fashion ' - 'provides\n' - 'significant scope for speed optimisations within the ' - 'interpreter, at\n' - 'the cost of some flexibility in the handling of special ' - 'methods (the\n' - 'special method *must* be set on the class object itself in ' - 'order to be\n' - 'consistently invoked by the interpreter).\n', - 'string-methods': 'String Methods\n' - '**************\n' - '\n' - 'Strings implement all of the common sequence operations, ' - 'along with\n' - 'the additional methods described below.\n' - '\n' - 'Strings also support two styles of string formatting, one ' - 'providing a\n' - 'large degree of flexibility and customization (see ' - '"str.format()",\n' - 'Format String Syntax and Custom String Formatting) and the ' - 'other based\n' - 'on C "printf" style formatting that handles a narrower ' - 'range of types\n' - 'and is slightly harder to use correctly, but is often ' - 'faster for the\n' - 'cases it can handle (printf-style String Formatting).\n' - '\n' - 'The Text Processing Services section of the standard ' - 'library covers a\n' - 'number of other modules that provide various text related ' - 'utilities\n' - '(including regular expression support in the "re" ' - 'module).\n' - '\n' - 'str.capitalize()\n' - '\n' - ' Return a copy of the string with its first character ' - 'capitalized\n' - ' and the rest lowercased.\n' - '\n' - ' Changed in version 3.8: The first character is now put ' - 'into\n' - ' titlecase rather than uppercase. This means that ' - 'characters like\n' - ' digraphs will only have their first letter capitalized, ' - 'instead of\n' - ' the full character.\n' - '\n' - 'str.casefold()\n' - '\n' - ' Return a casefolded copy of the string. Casefolded ' - 'strings may be\n' - ' used for caseless matching.\n' - '\n' - ' Casefolding is similar to lowercasing but more ' - 'aggressive because\n' - ' it is intended to remove all case distinctions in a ' - 'string. For\n' - ' example, the German lowercase letter "\'ß\'" is ' - 'equivalent to ""ss"".\n' - ' Since it is already lowercase, "lower()" would do ' - 'nothing to "\'ß\'";\n' - ' "casefold()" converts it to ""ss"".\n' - '\n' - ' The casefolding algorithm is described in section 3.13 ' - '‘Default\n' - ' Case Folding’ of the Unicode Standard.\n' - '\n' - ' Added in version 3.3.\n' - '\n' - 'str.center(width[, fillchar])\n' - '\n' - ' Return centered in a string of length *width*. Padding ' - 'is done\n' - ' using the specified *fillchar* (default is an ASCII ' - 'space). The\n' - ' original string is returned if *width* is less than or ' - 'equal to\n' - ' "len(s)".\n' - '\n' - 'str.count(sub[, start[, end]])\n' - '\n' - ' Return the number of non-overlapping occurrences of ' - 'substring *sub*\n' - ' in the range [*start*, *end*]. 
Optional arguments ' - '*start* and\n' - ' *end* are interpreted as in slice notation.\n' - '\n' - ' If *sub* is empty, returns the number of empty strings ' - 'between\n' - ' characters which is the length of the string plus one.\n' - '\n' - "str.encode(encoding='utf-8', errors='strict')\n" - '\n' - ' Return the string encoded to "bytes".\n' - '\n' - ' *encoding* defaults to "\'utf-8\'"; see Standard ' - 'Encodings for\n' - ' possible values.\n' - '\n' - ' *errors* controls how encoding errors are handled. If ' - '"\'strict\'"\n' - ' (the default), a "UnicodeError" exception is raised. ' - 'Other possible\n' - ' values are "\'ignore\'", "\'replace\'", ' - '"\'xmlcharrefreplace\'",\n' - ' "\'backslashreplace\'" and any other name registered ' - 'via\n' - ' "codecs.register_error()". See Error Handlers for ' - 'details.\n' - '\n' - ' For performance reasons, the value of *errors* is not ' - 'checked for\n' - ' validity unless an encoding error actually occurs, ' - 'Python\n' - ' Development Mode is enabled or a debug build is used.\n' - '\n' - ' Changed in version 3.1: Added support for keyword ' - 'arguments.\n' - '\n' - ' Changed in version 3.9: The value of the *errors* ' - 'argument is now\n' - ' checked in Python Development Mode and in debug mode.\n' - '\n' - 'str.endswith(suffix[, start[, end]])\n' - '\n' - ' Return "True" if the string ends with the specified ' - '*suffix*,\n' - ' otherwise return "False". *suffix* can also be a tuple ' - 'of suffixes\n' - ' to look for. With optional *start*, test beginning at ' - 'that\n' - ' position. With optional *end*, stop comparing at that ' - 'position.\n' - '\n' - 'str.expandtabs(tabsize=8)\n' - '\n' - ' Return a copy of the string where all tab characters ' - 'are replaced\n' - ' by one or more spaces, depending on the current column ' - 'and the\n' - ' given tab size. Tab positions occur every *tabsize* ' - 'characters\n' - ' (default is 8, giving tab positions at columns 0, 8, 16 ' - 'and so on).\n' - ' To expand the string, the current column is set to zero ' - 'and the\n' - ' string is examined character by character. If the ' - 'character is a\n' - ' tab ("\\t"), one or more space characters are inserted ' - 'in the result\n' - ' until the current column is equal to the next tab ' - 'position. (The\n' - ' tab character itself is not copied.) If the character ' - 'is a newline\n' - ' ("\\n") or return ("\\r"), it is copied and the current ' - 'column is\n' - ' reset to zero. Any other character is copied unchanged ' - 'and the\n' - ' current column is incremented by one regardless of how ' - 'the\n' - ' character is represented when printed.\n' - '\n' - " >>> '01\\t012\\t0123\\t01234'.expandtabs()\n" - " '01 012 0123 01234'\n" - " >>> '01\\t012\\t0123\\t01234'.expandtabs(4)\n" - " '01 012 0123 01234'\n" - '\n' - 'str.find(sub[, start[, end]])\n' - '\n' - ' Return the lowest index in the string where substring ' - '*sub* is\n' - ' found within the slice "s[start:end]". Optional ' - 'arguments *start*\n' - ' and *end* are interpreted as in slice notation. Return ' - '"-1" if\n' - ' *sub* is not found.\n' - '\n' - ' Note:\n' - '\n' - ' The "find()" method should be used only if you need ' - 'to know the\n' - ' position of *sub*. To check if *sub* is a substring ' - 'or not, use\n' - ' the "in" operator:\n' - '\n' - " >>> 'Py' in 'Python'\n" - ' True\n' - '\n' - 'str.format(*args, **kwargs)\n' - '\n' - ' Perform a string formatting operation. 
The string on ' - 'which this\n' - ' method is called can contain literal text or ' - 'replacement fields\n' - ' delimited by braces "{}". Each replacement field ' - 'contains either\n' - ' the numeric index of a positional argument, or the name ' - 'of a\n' - ' keyword argument. Returns a copy of the string where ' - 'each\n' - ' replacement field is replaced with the string value of ' - 'the\n' - ' corresponding argument.\n' - '\n' - ' >>> "The sum of 1 + 2 is {0}".format(1+2)\n' - " 'The sum of 1 + 2 is 3'\n" - '\n' - ' See Format String Syntax for a description of the ' - 'various\n' - ' formatting options that can be specified in format ' - 'strings.\n' - '\n' - ' Note:\n' - '\n' - ' When formatting a number ("int", "float", "complex",\n' - ' "decimal.Decimal" and subclasses) with the "n" type ' - '(ex:\n' - ' "\'{:n}\'.format(1234)"), the function temporarily ' - 'sets the\n' - ' "LC_CTYPE" locale to the "LC_NUMERIC" locale to ' - 'decode\n' - ' "decimal_point" and "thousands_sep" fields of ' - '"localeconv()" if\n' - ' they are non-ASCII or longer than 1 byte, and the ' - '"LC_NUMERIC"\n' - ' locale is different than the "LC_CTYPE" locale. This ' - 'temporary\n' - ' change affects other threads.\n' - '\n' - ' Changed in version 3.7: When formatting a number with ' - 'the "n" type,\n' - ' the function sets temporarily the "LC_CTYPE" locale to ' - 'the\n' - ' "LC_NUMERIC" locale in some cases.\n' - '\n' - 'str.format_map(mapping, /)\n' - '\n' - ' Similar to "str.format(**mapping)", except that ' - '"mapping" is used\n' - ' directly and not copied to a "dict". This is useful if ' - 'for example\n' - ' "mapping" is a dict subclass:\n' - '\n' - ' >>> class Default(dict):\n' - ' ... def __missing__(self, key):\n' - ' ... return key\n' - ' ...\n' - " >>> '{name} was born in " - "{country}'.format_map(Default(name='Guido'))\n" - " 'Guido was born in country'\n" - '\n' - ' Added in version 3.2.\n' - '\n' - 'str.index(sub[, start[, end]])\n' - '\n' - ' Like "find()", but raise "ValueError" when the ' - 'substring is not\n' - ' found.\n' - '\n' - 'str.isalnum()\n' - '\n' - ' Return "True" if all characters in the string are ' - 'alphanumeric and\n' - ' there is at least one character, "False" otherwise. A ' - 'character\n' - ' "c" is alphanumeric if one of the following returns ' - '"True":\n' - ' "c.isalpha()", "c.isdecimal()", "c.isdigit()", or ' - '"c.isnumeric()".\n' - '\n' - 'str.isalpha()\n' - '\n' - ' Return "True" if all characters in the string are ' - 'alphabetic and\n' - ' there is at least one character, "False" otherwise. ' - 'Alphabetic\n' - ' characters are those characters defined in the Unicode ' - 'character\n' - ' database as “Letterâ€, i.e., those with general category ' - 'property\n' - ' being one of “Lmâ€, “Ltâ€, “Luâ€, “Llâ€, or “Loâ€. Note ' - 'that this is\n' - ' different from the Alphabetic property defined in the ' - 'section 4.10\n' - ' ‘Letters, Alphabetic, and Ideographic’ of the Unicode ' - 'Standard.\n' - '\n' - 'str.isascii()\n' - '\n' - ' Return "True" if the string is empty or all characters ' - 'in the\n' - ' string are ASCII, "False" otherwise. ASCII characters ' - 'have code\n' - ' points in the range U+0000-U+007F.\n' - '\n' - ' Added in version 3.7.\n' - '\n' - 'str.isdecimal()\n' - '\n' - ' Return "True" if all characters in the string are ' - 'decimal\n' - ' characters and there is at least one character, "False" ' - 'otherwise.\n' - ' Decimal characters are those that can be used to form ' - 'numbers in\n' - ' base 10, e.g. 
U+0660, ARABIC-INDIC DIGIT ZERO. ' - 'Formally a decimal\n' - ' character is a character in the Unicode General ' - 'Category “Ndâ€.\n' - '\n' - 'str.isdigit()\n' - '\n' - ' Return "True" if all characters in the string are ' - 'digits and there\n' - ' is at least one character, "False" otherwise. Digits ' - 'include\n' - ' decimal characters and digits that need special ' - 'handling, such as\n' - ' the compatibility superscript digits. This covers ' - 'digits which\n' - ' cannot be used to form numbers in base 10, like the ' - 'Kharosthi\n' - ' numbers. Formally, a digit is a character that has the ' - 'property\n' - ' value Numeric_Type=Digit or Numeric_Type=Decimal.\n' - '\n' - 'str.isidentifier()\n' - '\n' - ' Return "True" if the string is a valid identifier ' - 'according to the\n' - ' language definition, section Identifiers and keywords.\n' - '\n' - ' "keyword.iskeyword()" can be used to test whether ' - 'string "s" is a\n' - ' reserved identifier, such as "def" and "class".\n' - '\n' - ' Example:\n' - '\n' - ' >>> from keyword import iskeyword\n' - '\n' - " >>> 'hello'.isidentifier(), iskeyword('hello')\n" - ' (True, False)\n' - " >>> 'def'.isidentifier(), iskeyword('def')\n" - ' (True, True)\n' - '\n' - 'str.islower()\n' - '\n' - ' Return "True" if all cased characters [4] in the string ' - 'are\n' - ' lowercase and there is at least one cased character, ' - '"False"\n' - ' otherwise.\n' - '\n' - 'str.isnumeric()\n' - '\n' - ' Return "True" if all characters in the string are ' - 'numeric\n' - ' characters, and there is at least one character, ' - '"False" otherwise.\n' - ' Numeric characters include digit characters, and all ' - 'characters\n' - ' that have the Unicode numeric value property, e.g. ' - 'U+2155, VULGAR\n' - ' FRACTION ONE FIFTH. Formally, numeric characters are ' - 'those with\n' - ' the property value Numeric_Type=Digit, ' - 'Numeric_Type=Decimal or\n' - ' Numeric_Type=Numeric.\n' - '\n' - 'str.isprintable()\n' - '\n' - ' Return "True" if all characters in the string are ' - 'printable or the\n' - ' string is empty, "False" otherwise. Nonprintable ' - 'characters are\n' - ' those characters defined in the Unicode character ' - 'database as\n' - ' “Other†or “Separatorâ€, excepting the ASCII space ' - '(0x20) which is\n' - ' considered printable. (Note that printable characters ' - 'in this\n' - ' context are those which should not be escaped when ' - '"repr()" is\n' - ' invoked on a string. 
It has no bearing on the handling ' - 'of strings\n' - ' written to "sys.stdout" or "sys.stderr".)\n' - '\n' - 'str.isspace()\n' - '\n' - ' Return "True" if there are only whitespace characters ' - 'in the string\n' - ' and there is at least one character, "False" ' - 'otherwise.\n' - '\n' - ' A character is *whitespace* if in the Unicode character ' - 'database\n' - ' (see "unicodedata"), either its general category is ' - '"Zs"\n' - ' (“Separator, spaceâ€), or its bidirectional class is one ' - 'of "WS",\n' - ' "B", or "S".\n' - '\n' - 'str.istitle()\n' - '\n' - ' Return "True" if the string is a titlecased string and ' - 'there is at\n' - ' least one character, for example uppercase characters ' - 'may only\n' - ' follow uncased characters and lowercase characters only ' - 'cased ones.\n' - ' Return "False" otherwise.\n' - '\n' - 'str.isupper()\n' - '\n' - ' Return "True" if all cased characters [4] in the string ' - 'are\n' - ' uppercase and there is at least one cased character, ' - '"False"\n' - ' otherwise.\n' - '\n' - " >>> 'BANANA'.isupper()\n" - ' True\n' - " >>> 'banana'.isupper()\n" - ' False\n' - " >>> 'baNana'.isupper()\n" - ' False\n' - " >>> ' '.isupper()\n" - ' False\n' - '\n' - 'str.join(iterable)\n' - '\n' - ' Return a string which is the concatenation of the ' - 'strings in\n' - ' *iterable*. A "TypeError" will be raised if there are ' - 'any non-\n' - ' string values in *iterable*, including "bytes" ' - 'objects. The\n' - ' separator between elements is the string providing this ' - 'method.\n' - '\n' - 'str.ljust(width[, fillchar])\n' - '\n' - ' Return the string left justified in a string of length ' - '*width*.\n' - ' Padding is done using the specified *fillchar* (default ' - 'is an ASCII\n' - ' space). The original string is returned if *width* is ' - 'less than or\n' - ' equal to "len(s)".\n' - '\n' - 'str.lower()\n' - '\n' - ' Return a copy of the string with all the cased ' - 'characters [4]\n' - ' converted to lowercase.\n' - '\n' - ' The lowercasing algorithm used is described in section ' - '3.13\n' - ' ‘Default Case Folding’ of the Unicode Standard.\n' - '\n' - 'str.lstrip([chars])\n' - '\n' - ' Return a copy of the string with leading characters ' - 'removed. The\n' - ' *chars* argument is a string specifying the set of ' - 'characters to be\n' - ' removed. If omitted or "None", the *chars* argument ' - 'defaults to\n' - ' removing whitespace. The *chars* argument is not a ' - 'prefix; rather,\n' - ' all combinations of its values are stripped:\n' - '\n' - " >>> ' spacious '.lstrip()\n" - " 'spacious '\n" - " >>> 'www.example.com'.lstrip('cmowz.')\n" - " 'example.com'\n" - '\n' - ' See "str.removeprefix()" for a method that will remove ' - 'a single\n' - ' prefix string rather than all of a set of characters. 
' - 'For example:\n' - '\n' - " >>> 'Arthur: three!'.lstrip('Arthur: ')\n" - " 'ee!'\n" - " >>> 'Arthur: three!'.removeprefix('Arthur: ')\n" - " 'three!'\n" - '\n' - 'static str.maketrans(x[, y[, z]])\n' - '\n' - ' This static method returns a translation table usable ' - 'for\n' - ' "str.translate()".\n' - '\n' - ' If there is only one argument, it must be a dictionary ' - 'mapping\n' - ' Unicode ordinals (integers) or characters (strings of ' - 'length 1) to\n' - ' Unicode ordinals, strings (of arbitrary lengths) or ' - '"None".\n' - ' Character keys will then be converted to ordinals.\n' - '\n' - ' If there are two arguments, they must be strings of ' - 'equal length,\n' - ' and in the resulting dictionary, each character in x ' - 'will be mapped\n' - ' to the character at the same position in y. If there ' - 'is a third\n' - ' argument, it must be a string, whose characters will be ' - 'mapped to\n' - ' "None" in the result.\n' - '\n' - 'str.partition(sep)\n' - '\n' - ' Split the string at the first occurrence of *sep*, and ' - 'return a\n' - ' 3-tuple containing the part before the separator, the ' - 'separator\n' - ' itself, and the part after the separator. If the ' - 'separator is not\n' - ' found, return a 3-tuple containing the string itself, ' - 'followed by\n' - ' two empty strings.\n' - '\n' - 'str.removeprefix(prefix, /)\n' - '\n' - ' If the string starts with the *prefix* string, return\n' - ' "string[len(prefix):]". Otherwise, return a copy of the ' - 'original\n' - ' string:\n' - '\n' - " >>> 'TestHook'.removeprefix('Test')\n" - " 'Hook'\n" - " >>> 'BaseTestCase'.removeprefix('Test')\n" - " 'BaseTestCase'\n" - '\n' - ' Added in version 3.9.\n' - '\n' - 'str.removesuffix(suffix, /)\n' - '\n' - ' If the string ends with the *suffix* string and that ' - '*suffix* is\n' - ' not empty, return "string[:-len(suffix)]". Otherwise, ' - 'return a copy\n' - ' of the original string:\n' - '\n' - " >>> 'MiscTests'.removesuffix('Tests')\n" - " 'Misc'\n" - " >>> 'TmpDirMixin'.removesuffix('Tests')\n" - " 'TmpDirMixin'\n" - '\n' - ' Added in version 3.9.\n' - '\n' - 'str.replace(old, new, count=-1)\n' - '\n' - ' Return a copy of the string with all occurrences of ' - 'substring *old*\n' - ' replaced by *new*. If *count* is given, only the first ' - '*count*\n' - ' occurrences are replaced. If *count* is not specified ' - 'or "-1", then\n' - ' all occurrences are replaced.\n' - '\n' - ' Changed in version 3.13: *count* is now supported as a ' - 'keyword\n' - ' argument.\n' - '\n' - 'str.rfind(sub[, start[, end]])\n' - '\n' - ' Return the highest index in the string where substring ' - '*sub* is\n' - ' found, such that *sub* is contained within ' - '"s[start:end]".\n' - ' Optional arguments *start* and *end* are interpreted as ' - 'in slice\n' - ' notation. Return "-1" on failure.\n' - '\n' - 'str.rindex(sub[, start[, end]])\n' - '\n' - ' Like "rfind()" but raises "ValueError" when the ' - 'substring *sub* is\n' - ' not found.\n' - '\n' - 'str.rjust(width[, fillchar])\n' - '\n' - ' Return the string right justified in a string of length ' - '*width*.\n' - ' Padding is done using the specified *fillchar* (default ' - 'is an ASCII\n' - ' space). The original string is returned if *width* is ' - 'less than or\n' - ' equal to "len(s)".\n' - '\n' - 'str.rpartition(sep)\n' - '\n' - ' Split the string at the last occurrence of *sep*, and ' - 'return a\n' - ' 3-tuple containing the part before the separator, the ' - 'separator\n' - ' itself, and the part after the separator. 
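For instance, with a string containing two separators:

    >>> 'python.org/3/docs'.partition('/')
    ('python.org', '/', '3/docs')
    >>> 'python.org/3/docs'.rpartition('/')
    ('python.org/3', '/', 'docs')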
If the ' - 'separator is not\n' - ' found, return a 3-tuple containing two empty strings, ' - 'followed by\n' - ' the string itself.\n' - '\n' - 'str.rsplit(sep=None, maxsplit=-1)\n' - '\n' - ' Return a list of the words in the string, using *sep* ' - 'as the\n' - ' delimiter string. If *maxsplit* is given, at most ' - '*maxsplit* splits\n' - ' are done, the *rightmost* ones. If *sep* is not ' - 'specified or\n' - ' "None", any whitespace string is a separator. Except ' - 'for splitting\n' - ' from the right, "rsplit()" behaves like "split()" which ' - 'is\n' - ' described in detail below.\n' - '\n' - 'str.rstrip([chars])\n' - '\n' - ' Return a copy of the string with trailing characters ' - 'removed. The\n' - ' *chars* argument is a string specifying the set of ' - 'characters to be\n' - ' removed. If omitted or "None", the *chars* argument ' - 'defaults to\n' - ' removing whitespace. The *chars* argument is not a ' - 'suffix; rather,\n' - ' all combinations of its values are stripped:\n' - '\n' - " >>> ' spacious '.rstrip()\n" - " ' spacious'\n" - " >>> 'mississippi'.rstrip('ipz')\n" - " 'mississ'\n" - '\n' - ' See "str.removesuffix()" for a method that will remove ' - 'a single\n' - ' suffix string rather than all of a set of characters. ' - 'For example:\n' - '\n' - " >>> 'Monty Python'.rstrip(' Python')\n" - " 'M'\n" - " >>> 'Monty Python'.removesuffix(' Python')\n" - " 'Monty'\n" - '\n' - 'str.split(sep=None, maxsplit=-1)\n' - '\n' - ' Return a list of the words in the string, using *sep* ' - 'as the\n' - ' delimiter string. If *maxsplit* is given, at most ' - '*maxsplit*\n' - ' splits are done (thus, the list will have at most ' - '"maxsplit+1"\n' - ' elements). If *maxsplit* is not specified or "-1", ' - 'then there is\n' - ' no limit on the number of splits (all possible splits ' - 'are made).\n' - '\n' - ' If *sep* is given, consecutive delimiters are not ' - 'grouped together\n' - ' and are deemed to delimit empty strings (for example,\n' - ' "\'1,,2\'.split(\',\')" returns "[\'1\', \'\', ' - '\'2\']"). The *sep* argument\n' - ' may consist of multiple characters as a single ' - 'delimiter (to split\n' - ' with multiple delimiters, use "re.split()"). Splitting ' - 'an empty\n' - ' string with a specified separator returns "[\'\']".\n' - '\n' - ' For example:\n' - '\n' - " >>> '1,2,3'.split(',')\n" - " ['1', '2', '3']\n" - " >>> '1,2,3'.split(',', maxsplit=1)\n" - " ['1', '2,3']\n" - " >>> '1,2,,3,'.split(',')\n" - " ['1', '2', '', '3', '']\n" - " >>> '1<>2<>3<4'.split('<>')\n" - " ['1', '2', '3<4']\n" - '\n' - ' If *sep* is not specified or is "None", a different ' - 'splitting\n' - ' algorithm is applied: runs of consecutive whitespace ' - 'are regarded\n' - ' as a single separator, and the result will contain no ' - 'empty strings\n' - ' at the start or end if the string has leading or ' - 'trailing\n' - ' whitespace. Consequently, splitting an empty string or ' - 'a string\n' - ' consisting of just whitespace with a "None" separator ' - 'returns "[]".\n' - '\n' - ' For example:\n' - '\n' - " >>> '1 2 3'.split()\n" - " ['1', '2', '3']\n" - " >>> '1 2 3'.split(maxsplit=1)\n" - " ['1', '2 3']\n" - " >>> ' 1 2 3 '.split()\n" - " ['1', '2', '3']\n" - '\n' - 'str.splitlines(keepends=False)\n' - '\n' - ' Return a list of the lines in the string, breaking at ' - 'line\n' - ' boundaries. Line breaks are not included in the ' - 'resulting list\n' - ' unless *keepends* is given and true.\n' - '\n' - ' This method splits on the following line boundaries. 
' - 'In\n' - ' particular, the boundaries are a superset of *universal ' - 'newlines*.\n' - '\n' - ' ' - '+-------------------------+-------------------------------+\n' - ' | Representation | ' - 'Description |\n' - ' ' - '|=========================|===============================|\n' - ' | "\\n" | Line ' - 'Feed |\n' - ' ' - '+-------------------------+-------------------------------+\n' - ' | "\\r" | Carriage ' - 'Return |\n' - ' ' - '+-------------------------+-------------------------------+\n' - ' | "\\r\\n" | Carriage Return + Line ' - 'Feed |\n' - ' ' - '+-------------------------+-------------------------------+\n' - ' | "\\v" or "\\x0b" | Line ' - 'Tabulation |\n' - ' ' - '+-------------------------+-------------------------------+\n' - ' | "\\f" or "\\x0c" | Form ' - 'Feed |\n' - ' ' - '+-------------------------+-------------------------------+\n' - ' | "\\x1c" | File ' - 'Separator |\n' - ' ' - '+-------------------------+-------------------------------+\n' - ' | "\\x1d" | Group ' - 'Separator |\n' - ' ' - '+-------------------------+-------------------------------+\n' - ' | "\\x1e" | Record ' - 'Separator |\n' - ' ' - '+-------------------------+-------------------------------+\n' - ' | "\\x85" | Next Line (C1 Control ' - 'Code) |\n' - ' ' - '+-------------------------+-------------------------------+\n' - ' | "\\u2028" | Line ' - 'Separator |\n' - ' ' - '+-------------------------+-------------------------------+\n' - ' | "\\u2029" | Paragraph ' - 'Separator |\n' - ' ' - '+-------------------------+-------------------------------+\n' - '\n' - ' Changed in version 3.2: "\\v" and "\\f" added to list ' - 'of line\n' - ' boundaries.\n' - '\n' - ' For example:\n' - '\n' - " >>> 'ab c\\n\\nde fg\\rkl\\r\\n'.splitlines()\n" - " ['ab c', '', 'de fg', 'kl']\n" - " >>> 'ab c\\n\\nde " - "fg\\rkl\\r\\n'.splitlines(keepends=True)\n" - " ['ab c\\n', '\\n', 'de fg\\r', 'kl\\r\\n']\n" - '\n' - ' Unlike "split()" when a delimiter string *sep* is ' - 'given, this\n' - ' method returns an empty list for the empty string, and ' - 'a terminal\n' - ' line break does not result in an extra line:\n' - '\n' - ' >>> "".splitlines()\n' - ' []\n' - ' >>> "One line\\n".splitlines()\n' - " ['One line']\n" - '\n' - ' For comparison, "split(\'\\n\')" gives:\n' - '\n' - " >>> ''.split('\\n')\n" - " ['']\n" - " >>> 'Two lines\\n'.split('\\n')\n" - " ['Two lines', '']\n" - '\n' - 'str.startswith(prefix[, start[, end]])\n' - '\n' - ' Return "True" if string starts with the *prefix*, ' - 'otherwise return\n' - ' "False". *prefix* can also be a tuple of prefixes to ' - 'look for.\n' - ' With optional *start*, test string beginning at that ' - 'position.\n' - ' With optional *end*, stop comparing string at that ' - 'position.\n' - '\n' - 'str.strip([chars])\n' - '\n' - ' Return a copy of the string with the leading and ' - 'trailing\n' - ' characters removed. The *chars* argument is a string ' - 'specifying the\n' - ' set of characters to be removed. If omitted or "None", ' - 'the *chars*\n' - ' argument defaults to removing whitespace. The *chars* ' - 'argument is\n' - ' not a prefix or suffix; rather, all combinations of its ' - 'values are\n' - ' stripped:\n' - '\n' - " >>> ' spacious '.strip()\n" - " 'spacious'\n" - " >>> 'www.example.com'.strip('cmowz.')\n" - " 'example'\n" - '\n' - ' The outermost leading and trailing *chars* argument ' - 'values are\n' - ' stripped from the string. 
Characters are removed from ' - 'the leading\n' - ' end until reaching a string character that is not ' - 'contained in the\n' - ' set of characters in *chars*. A similar action takes ' - 'place on the\n' - ' trailing end. For example:\n' - '\n' - " >>> comment_string = '#....... Section 3.2.1 Issue " - "#32 .......'\n" - " >>> comment_string.strip('.#! ')\n" - " 'Section 3.2.1 Issue #32'\n" - '\n' - 'str.swapcase()\n' - '\n' - ' Return a copy of the string with uppercase characters ' - 'converted to\n' - ' lowercase and vice versa. Note that it is not ' - 'necessarily true that\n' - ' "s.swapcase().swapcase() == s".\n' - '\n' - 'str.title()\n' - '\n' - ' Return a titlecased version of the string where words ' - 'start with an\n' - ' uppercase character and the remaining characters are ' - 'lowercase.\n' - '\n' - ' For example:\n' - '\n' - " >>> 'Hello world'.title()\n" - " 'Hello World'\n" - '\n' - ' The algorithm uses a simple language-independent ' - 'definition of a\n' - ' word as groups of consecutive letters. The definition ' - 'works in\n' - ' many contexts but it means that apostrophes in ' - 'contractions and\n' - ' possessives form word boundaries, which may not be the ' - 'desired\n' - ' result:\n' - '\n' - ' >>> "they\'re bill\'s friends from the UK".title()\n' - ' "They\'Re Bill\'S Friends From The Uk"\n' - '\n' - ' The "string.capwords()" function does not have this ' - 'problem, as it\n' - ' splits words on spaces only.\n' - '\n' - ' Alternatively, a workaround for apostrophes can be ' - 'constructed\n' - ' using regular expressions:\n' - '\n' - ' >>> import re\n' - ' >>> def titlecase(s):\n' - ' ... return re.sub(r"[A-Za-z]+(\'[A-Za-z]+)?",\n' - ' ... lambda mo: ' - 'mo.group(0).capitalize(),\n' - ' ... s)\n' - ' ...\n' - ' >>> titlecase("they\'re bill\'s friends.")\n' - ' "They\'re Bill\'s Friends."\n' - '\n' - 'str.translate(table)\n' - '\n' - ' Return a copy of the string in which each character has ' - 'been mapped\n' - ' through the given translation table. The table must be ' - 'an object\n' - ' that implements indexing via "__getitem__()", typically ' - 'a *mapping*\n' - ' or *sequence*. When indexed by a Unicode ordinal (an ' - 'integer), the\n' - ' table object can do any of the following: return a ' - 'Unicode ordinal\n' - ' or a string, to map the character to one or more other ' - 'characters;\n' - ' return "None", to delete the character from the return ' - 'string; or\n' - ' raise a "LookupError" exception, to map the character ' - 'to itself.\n' - '\n' - ' You can use "str.maketrans()" to create a translation ' - 'map from\n' - ' character-to-character mappings in different formats.\n' - '\n' - ' See also the "codecs" module for a more flexible ' - 'approach to custom\n' - ' character mappings.\n' - '\n' - 'str.upper()\n' - '\n' - ' Return a copy of the string with all the cased ' - 'characters [4]\n' - ' converted to uppercase. Note that ' - '"s.upper().isupper()" might be\n' - ' "False" if "s" contains uncased characters or if the ' - 'Unicode\n' - ' category of the resulting character(s) is not “Lu†' - '(Letter,\n' - ' uppercase), but e.g. “Lt†(Letter, titlecase).\n' - '\n' - ' The uppercasing algorithm used is described in section ' - '3.13\n' - ' ‘Default Case Folding’ of the Unicode Standard.\n' - '\n' - 'str.zfill(width)\n' - '\n' - ' Return a copy of the string left filled with ASCII ' - '"\'0\'" digits to\n' - ' make a string of length *width*. 
A leading sign prefix\n' - ' ("\'+\'"/"\'-\'") is handled by inserting the padding ' - '*after* the sign\n' - ' character rather than before. The original string is ' - 'returned if\n' - ' *width* is less than or equal to "len(s)".\n' - '\n' - ' For example:\n' - '\n' - ' >>> "42".zfill(5)\n' - " '00042'\n" - ' >>> "-42".zfill(5)\n' - " '-0042'\n", - 'strings': 'String and Bytes literals\n' - '*************************\n' - '\n' - 'String literals are described by the following lexical ' - 'definitions:\n' - '\n' - ' stringliteral ::= [stringprefix](shortstring | longstring)\n' - ' stringprefix ::= "r" | "u" | "R" | "U" | "f" | "F"\n' - ' | "fr" | "Fr" | "fR" | "FR" | "rf" | "rF" | ' - '"Rf" | "RF"\n' - ' shortstring ::= "\'" shortstringitem* "\'" | \'"\' ' - 'shortstringitem* \'"\'\n' - ' longstring ::= "\'\'\'" longstringitem* "\'\'\'" | ' - '\'"""\' longstringitem* \'"""\'\n' - ' shortstringitem ::= shortstringchar | stringescapeseq\n' - ' longstringitem ::= longstringchar | stringescapeseq\n' - ' shortstringchar ::= \n' - ' longstringchar ::= \n' - ' stringescapeseq ::= "\\" \n' - '\n' - ' bytesliteral ::= bytesprefix(shortbytes | longbytes)\n' - ' bytesprefix ::= "b" | "B" | "br" | "Br" | "bR" | "BR" | ' - '"rb" | "rB" | "Rb" | "RB"\n' - ' shortbytes ::= "\'" shortbytesitem* "\'" | \'"\' ' - 'shortbytesitem* \'"\'\n' - ' longbytes ::= "\'\'\'" longbytesitem* "\'\'\'" | \'"""\' ' - 'longbytesitem* \'"""\'\n' - ' shortbytesitem ::= shortbyteschar | bytesescapeseq\n' - ' longbytesitem ::= longbyteschar | bytesescapeseq\n' - ' shortbyteschar ::= \n' - ' longbyteschar ::= \n' - ' bytesescapeseq ::= "\\" \n' - '\n' - 'One syntactic restriction not indicated by these productions is ' - 'that\n' - 'whitespace is not allowed between the "stringprefix" or ' - '"bytesprefix"\n' - 'and the rest of the literal. The source character set is defined ' - 'by\n' - 'the encoding declaration; it is UTF-8 if no encoding declaration ' - 'is\n' - 'given in the source file; see section Encoding declarations.\n' - '\n' - 'In plain English: Both types of literals can be enclosed in ' - 'matching\n' - 'single quotes ("\'") or double quotes ("""). They can also be ' - 'enclosed\n' - 'in matching groups of three single or double quotes (these are\n' - 'generally referred to as *triple-quoted strings*). The backslash ' - '("\\")\n' - 'character is used to give special meaning to otherwise ordinary\n' - 'characters like "n", which means ‘newline’ when escaped ("\\n"). ' - 'It can\n' - 'also be used to escape characters that otherwise have a special\n' - 'meaning, such as newline, backslash itself, or the quote ' - 'character.\n' - 'See escape sequences below for examples.\n' - '\n' - 'Bytes literals are always prefixed with "\'b\'" or "\'B\'"; they ' - 'produce\n' - 'an instance of the "bytes" type instead of the "str" type. They ' - 'may\n' - 'only contain ASCII characters; bytes with a numeric value of 128 ' - 'or\n' - 'greater must be expressed with escapes.\n' - '\n' - 'Both string and bytes literals may optionally be prefixed with a\n' - 'letter "\'r\'" or "\'R\'"; such constructs are called *raw ' - 'string\n' - 'literals* and *raw bytes literals* respectively and treat ' - 'backslashes\n' - 'as literal characters. 
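A small comparison makes the difference visible (an illustrative snippet; any short string would do):

    >>> print('first\nsecond')      # the escape is interpreted as a newline
    first
    second
    >>> print(r'first\nsecond')     # raw literal: backslash and 'n' are kept
    first\nsecond
    >>> len('\n'), len(r'\n')
    (1, 2)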
As a result, in raw string literals, ' - '"\'\\U\'"\n' - 'and "\'\\u\'" escapes are not treated specially.\n' - '\n' - 'Added in version 3.3: The "\'rb\'" prefix of raw bytes literals ' - 'has been\n' - 'added as a synonym of "\'br\'".Support for the unicode legacy ' - 'literal\n' - '("u\'value\'") was reintroduced to simplify the maintenance of ' - 'dual\n' - 'Python 2.x and 3.x codebases. See **PEP 414** for more ' - 'information.\n' - '\n' - 'A string literal with "\'f\'" or "\'F\'" in its prefix is a ' - '*formatted\n' - 'string literal*; see f-strings. The "\'f\'" may be combined with ' - '"\'r\'",\n' - 'but not with "\'b\'" or "\'u\'", therefore raw formatted strings ' - 'are\n' - 'possible, but formatted bytes literals are not.\n' - '\n' - 'In triple-quoted literals, unescaped newlines and quotes are ' - 'allowed\n' - '(and are retained), except that three unescaped quotes in a row\n' - 'terminate the literal. (A “quote†is the character used to open ' - 'the\n' - 'literal, i.e. either "\'" or """.)\n' - '\n' - '\n' - 'Escape sequences\n' - '================\n' - '\n' - 'Unless an "\'r\'" or "\'R\'" prefix is present, escape sequences ' - 'in string\n' - 'and bytes literals are interpreted according to rules similar to ' - 'those\n' - 'used by Standard C. The recognized escape sequences are:\n' - '\n' - '+---------------------------+-----------------------------------+---------+\n' - '| Escape Sequence | Meaning | ' - 'Notes |\n' - '|===========================|===================================|=========|\n' - '| "\\" | Backslash and newline ignored ' - '| (1) |\n' - '+---------------------------+-----------------------------------+---------+\n' - '| "\\\\" | Backslash ' - '("\\") | |\n' - '+---------------------------+-----------------------------------+---------+\n' - '| "\\\'" | Single quote ' - '("\'") | |\n' - '+---------------------------+-----------------------------------+---------+\n' - '| "\\"" | Double quote (""") ' - '| |\n' - '+---------------------------+-----------------------------------+---------+\n' - '| "\\a" | ASCII Bell (BEL) ' - '| |\n' - '+---------------------------+-----------------------------------+---------+\n' - '| "\\b" | ASCII Backspace (BS) ' - '| |\n' - '+---------------------------+-----------------------------------+---------+\n' - '| "\\f" | ASCII Formfeed (FF) ' - '| |\n' - '+---------------------------+-----------------------------------+---------+\n' - '| "\\n" | ASCII Linefeed (LF) ' - '| |\n' - '+---------------------------+-----------------------------------+---------+\n' - '| "\\r" | ASCII Carriage Return (CR) ' - '| |\n' - '+---------------------------+-----------------------------------+---------+\n' - '| "\\t" | ASCII Horizontal Tab (TAB) ' - '| |\n' - '+---------------------------+-----------------------------------+---------+\n' - '| "\\v" | ASCII Vertical Tab (VT) ' - '| |\n' - '+---------------------------+-----------------------------------+---------+\n' - '| "\\*ooo*" | Character with octal value *ooo* ' - '| (2,4) |\n' - '+---------------------------+-----------------------------------+---------+\n' - '| "\\x*hh*" | Character with hex value *hh* ' - '| (3,4) |\n' - '+---------------------------+-----------------------------------+---------+\n' - '\n' - 'Escape sequences only recognized in string literals are:\n' - '\n' - '+---------------------------+-----------------------------------+---------+\n' - '| Escape Sequence | Meaning | ' - 'Notes |\n' - '|===========================|===================================|=========|\n' - 
'| "\\N{*name*}" | Character named *name* in the ' - '| (5) |\n' - '| | Unicode database ' - '| |\n' - '+---------------------------+-----------------------------------+---------+\n' - '| "\\u*xxxx*" | Character with 16-bit hex value ' - '| (6) |\n' - '| | *xxxx* ' - '| |\n' - '+---------------------------+-----------------------------------+---------+\n' - '| "\\U*xxxxxxxx*" | Character with 32-bit hex value ' - '| (7) |\n' - '| | *xxxxxxxx* ' - '| |\n' - '+---------------------------+-----------------------------------+---------+\n' - '\n' - 'Notes:\n' - '\n' - '1. A backslash can be added at the end of a line to ignore the\n' - ' newline:\n' - '\n' - " >>> 'This string will not include \\\n" - " ... backslashes or newline characters.'\n" - " 'This string will not include backslashes or newline " - "characters.'\n" - '\n' - ' The same result can be achieved using triple-quoted strings, ' - 'or\n' - ' parentheses and string literal concatenation.\n' - '\n' - '2. As in Standard C, up to three octal digits are accepted.\n' - '\n' - ' Changed in version 3.11: Octal escapes with value larger than\n' - ' "0o377" produce a "DeprecationWarning".\n' - '\n' - ' Changed in version 3.12: Octal escapes with value larger than\n' - ' "0o377" produce a "SyntaxWarning". In a future Python version ' - 'they\n' - ' will be eventually a "SyntaxError".\n' - '\n' - '3. Unlike in Standard C, exactly two hex digits are required.\n' - '\n' - '4. In a bytes literal, hexadecimal and octal escapes denote the ' - 'byte\n' - ' with the given value. In a string literal, these escapes ' - 'denote a\n' - ' Unicode character with the given value.\n' - '\n' - '5. Changed in version 3.3: Support for name aliases [1] has been\n' - ' added.\n' - '\n' - '6. Exactly four hex digits are required.\n' - '\n' - '7. Any Unicode character can be encoded this way. Exactly eight ' - 'hex\n' - ' digits are required.\n' - '\n' - 'Unlike Standard C, all unrecognized escape sequences are left in ' - 'the\n' - 'string unchanged, i.e., *the backslash is left in the result*. ' - '(This\n' - 'behavior is useful when debugging: if an escape sequence is ' - 'mistyped,\n' - 'the resulting output is more easily recognized as broken.) It is ' - 'also\n' - 'important to note that the escape sequences only recognized in ' - 'string\n' - 'literals fall into the category of unrecognized escapes for ' - 'bytes\n' - 'literals.\n' - '\n' - 'Changed in version 3.6: Unrecognized escape sequences produce a\n' - '"DeprecationWarning".\n' - '\n' - 'Changed in version 3.12: Unrecognized escape sequences produce a\n' - '"SyntaxWarning". In a future Python version they will be ' - 'eventually a\n' - '"SyntaxError".\n' - '\n' - 'Even in a raw literal, quotes can be escaped with a backslash, ' - 'but the\n' - 'backslash remains in the result; for example, "r"\\""" is a ' - 'valid\n' - 'string literal consisting of two characters: a backslash and a ' - 'double\n' - 'quote; "r"\\"" is not a valid string literal (even a raw string ' - 'cannot\n' - 'end in an odd number of backslashes). Specifically, *a raw ' - 'literal\n' - 'cannot end in a single backslash* (since the backslash would ' - 'escape\n' - 'the following quote character). 
Note also that a single ' - 'backslash\n' - 'followed by a newline is interpreted as those two characters as ' - 'part\n' - 'of the literal, *not* as a line continuation.\n', - 'subscriptions': 'Subscriptions\n' - '*************\n' - '\n' - 'The subscription of an instance of a container class will ' - 'generally\n' - 'select an element from the container. The subscription of a ' - '*generic\n' - 'class* will generally return a GenericAlias object.\n' - '\n' - ' subscription ::= primary "[" flexible_expression_list ' - '"]"\n' - '\n' - 'When an object is subscripted, the interpreter will ' - 'evaluate the\n' - 'primary and the expression list.\n' - '\n' - 'The primary must evaluate to an object that supports ' - 'subscription. An\n' - 'object may support subscription through defining one or ' - 'both of\n' - '"__getitem__()" and "__class_getitem__()". When the primary ' - 'is\n' - 'subscripted, the evaluated result of the expression list ' - 'will be\n' - 'passed to one of these methods. For more details on when\n' - '"__class_getitem__" is called instead of "__getitem__", ' - 'see\n' - '__class_getitem__ versus __getitem__.\n' - '\n' - 'If the expression list contains at least one comma, or if ' - 'any of the\n' - 'expressions are starred, the expression list will evaluate ' - 'to a\n' - '"tuple" containing the items of the expression list. ' - 'Otherwise, the\n' - 'expression list will evaluate to the value of the list’s ' - 'sole member.\n' - '\n' - 'Changed in version 3.11: Expressions in an expression list ' - 'may be\n' - 'starred. See **PEP 646**.\n' - '\n' - 'For built-in objects, there are two types of objects that ' - 'support\n' - 'subscription via "__getitem__()":\n' - '\n' - '1. Mappings. If the primary is a *mapping*, the expression ' - 'list must\n' - ' evaluate to an object whose value is one of the keys of ' - 'the\n' - ' mapping, and the subscription selects the value in the ' - 'mapping that\n' - ' corresponds to that key. An example of a builtin mapping ' - 'class is\n' - ' the "dict" class.\n' - '\n' - '2. Sequences. If the primary is a *sequence*, the ' - 'expression list must\n' - ' evaluate to an "int" or a "slice" (as discussed in the ' - 'following\n' - ' section). Examples of builtin sequence classes include ' - 'the "str",\n' - ' "list" and "tuple" classes.\n' - '\n' - 'The formal syntax makes no special provision for negative ' - 'indices in\n' - '*sequences*. However, built-in sequences all provide a ' - '"__getitem__()"\n' - 'method that interprets negative indices by adding the ' - 'length of the\n' - 'sequence to the index so that, for example, "x[-1]" selects ' - 'the last\n' - 'item of "x". The resulting value must be a nonnegative ' - 'integer less\n' - 'than the number of items in the sequence, and the ' - 'subscription selects\n' - 'the item whose index is that value (counting from zero). 
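A user-defined class has to provide this negative-index handling itself; a rough sketch (the "Deck" class is invented for illustration and ignores slice objects for brevity):

    >>> class Deck:
    ...     def __init__(self, cards):
    ...         self._cards = list(cards)
    ...     def __getitem__(self, index):
    ...         if index < 0:                      # emulate built-in handling
    ...             index += len(self._cards)
    ...         if not 0 <= index < len(self._cards):
    ...             raise IndexError(index)
    ...         return self._cards[index]
    ...
    >>> d = Deck(['ace', 'king', 'queen'])
    >>> d[0], d[-1]
    ('ace', 'queen')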
' - 'Since the\n' - 'support for negative indices and slicing occurs in the ' - 'object’s\n' - '"__getitem__()" method, subclasses overriding this method ' - 'will need to\n' - 'explicitly add that support.\n' - '\n' - 'A "string" is a special kind of sequence whose items are ' - '*characters*.\n' - 'A character is not a separate data type but a string of ' - 'exactly one\n' - 'character.\n', - 'truth': 'Truth Value Testing\n' - '*******************\n' - '\n' - 'Any object can be tested for truth value, for use in an "if" or\n' - '"while" condition or as operand of the Boolean operations below.\n' - '\n' - 'By default, an object is considered true unless its class defines\n' - 'either a "__bool__()" method that returns "False" or a "__len__()"\n' - 'method that returns zero, when called with the object. [1] Here ' - 'are\n' - 'most of the built-in objects considered false:\n' - '\n' - '* constants defined to be false: "None" and "False"\n' - '\n' - '* zero of any numeric type: "0", "0.0", "0j", "Decimal(0)",\n' - ' "Fraction(0, 1)"\n' - '\n' - '* empty sequences and collections: "\'\'", "()", "[]", "{}", ' - '"set()",\n' - ' "range(0)"\n' - '\n' - 'Operations and built-in functions that have a Boolean result ' - 'always\n' - 'return "0" or "False" for false and "1" or "True" for true, unless\n' - 'otherwise stated. (Important exception: the Boolean operations ' - '"or"\n' - 'and "and" always return one of their operands.)\n', - 'try': 'The "try" statement\n' - '*******************\n' - '\n' - 'The "try" statement specifies exception handlers and/or cleanup code\n' - 'for a group of statements:\n' - '\n' - ' try_stmt ::= try1_stmt | try2_stmt | try3_stmt\n' - ' try1_stmt ::= "try" ":" suite\n' - ' ("except" [expression ["as" identifier]] ":" ' - 'suite)+\n' - ' ["else" ":" suite]\n' - ' ["finally" ":" suite]\n' - ' try2_stmt ::= "try" ":" suite\n' - ' ("except" "*" expression ["as" identifier] ":" ' - 'suite)+\n' - ' ["else" ":" suite]\n' - ' ["finally" ":" suite]\n' - ' try3_stmt ::= "try" ":" suite\n' - ' "finally" ":" suite\n' - '\n' - 'Additional information on exceptions can be found in section\n' - 'Exceptions, and information on using the "raise" statement to ' - 'generate\n' - 'exceptions may be found in section The raise statement.\n' - '\n' - '\n' - '"except" clause\n' - '===============\n' - '\n' - 'The "except" clause(s) specify one or more exception handlers. When ' - 'no\n' - 'exception occurs in the "try" clause, no exception handler is\n' - 'executed. When an exception occurs in the "try" suite, a search for ' - 'an\n' - 'exception handler is started. This search inspects the "except"\n' - 'clauses in turn until one is found that matches the exception. An\n' - 'expression-less "except" clause, if present, must be last; it ' - 'matches\n' - 'any exception.\n' - '\n' - 'For an "except" clause with an expression, the expression must\n' - 'evaluate to an exception type or a tuple of exception types. The\n' - 'raised exception matches an "except" clause whose expression ' - 'evaluates\n' - 'to the class or a *non-virtual base class* of the exception object, ' - 'or\n' - 'to a tuple that contains such a class.\n' - '\n' - 'If no "except" clause matches the exception, the search for an\n' - 'exception handler continues in the surrounding code and on the\n' - 'invocation stack. 
[1]\n' - '\n' - 'If the evaluation of an expression in the header of an "except" ' - 'clause\n' - 'raises an exception, the original search for a handler is canceled ' - 'and\n' - 'a search starts for the new exception in the surrounding code and on\n' - 'the call stack (it is treated as if the entire "try" statement ' - 'raised\n' - 'the exception).\n' - '\n' - 'When a matching "except" clause is found, the exception is assigned ' - 'to\n' - 'the target specified after the "as" keyword in that "except" clause,\n' - 'if present, and the "except" clause’s suite is executed. All ' - '"except"\n' - 'clauses must have an executable block. When the end of this block is\n' - 'reached, execution continues normally after the entire "try"\n' - 'statement. (This means that if two nested handlers exist for the ' - 'same\n' - 'exception, and the exception occurs in the "try" clause of the inner\n' - 'handler, the outer handler will not handle the exception.)\n' - '\n' - 'When an exception has been assigned using "as target", it is cleared\n' - 'at the end of the "except" clause. This is as if\n' - '\n' - ' except E as N:\n' - ' foo\n' - '\n' - 'was translated to\n' - '\n' - ' except E as N:\n' - ' try:\n' - ' foo\n' - ' finally:\n' - ' del N\n' - '\n' - 'This means the exception must be assigned to a different name to be\n' - 'able to refer to it after the "except" clause. Exceptions are ' - 'cleared\n' - 'because with the traceback attached to them, they form a reference\n' - 'cycle with the stack frame, keeping all locals in that frame alive\n' - 'until the next garbage collection occurs.\n' - '\n' - 'Before an "except" clause’s suite is executed, the exception is ' - 'stored\n' - 'in the "sys" module, where it can be accessed from within the body ' - 'of\n' - 'the "except" clause by calling "sys.exception()". When leaving an\n' - 'exception handler, the exception stored in the "sys" module is reset\n' - 'to its previous value:\n' - '\n' - ' >>> print(sys.exception())\n' - ' None\n' - ' >>> try:\n' - ' ... raise TypeError\n' - ' ... except:\n' - ' ... print(repr(sys.exception()))\n' - ' ... try:\n' - ' ... raise ValueError\n' - ' ... except:\n' - ' ... print(repr(sys.exception()))\n' - ' ... print(repr(sys.exception()))\n' - ' ...\n' - ' TypeError()\n' - ' ValueError()\n' - ' TypeError()\n' - ' >>> print(sys.exception())\n' - ' None\n' - '\n' - '\n' - '"except*" clause\n' - '================\n' - '\n' - 'The "except*" clause(s) are used for handling "ExceptionGroup"s. The\n' - 'exception type for matching is interpreted as in the case of ' - '"except",\n' - 'but in the case of exception groups we can have partial matches when\n' - 'the type matches some of the exceptions in the group. This means ' - 'that\n' - 'multiple "except*" clauses can execute, each handling part of the\n' - 'exception group. Each clause executes at most once and handles an\n' - 'exception group of all matching exceptions. Each exception in the\n' - 'group is handled by at most one "except*" clause, the first that\n' - 'matches it.\n' - '\n' - ' >>> try:\n' - ' ... raise ExceptionGroup("eg",\n' - ' ... [ValueError(1), TypeError(2), OSError(3), ' - 'OSError(4)])\n' - ' ... except* TypeError as e:\n' - " ... print(f'caught {type(e)} with nested {e.exceptions}')\n" - ' ... except* OSError as e:\n' - " ... 
print(f'caught {type(e)} with nested {e.exceptions}')\n" - ' ...\n' - " caught with nested (TypeError(2),)\n" - " caught with nested (OSError(3), " - 'OSError(4))\n' - ' + Exception Group Traceback (most recent call last):\n' - ' | File "", line 2, in \n' - ' | ExceptionGroup: eg\n' - ' +-+---------------- 1 ----------------\n' - ' | ValueError: 1\n' - ' +------------------------------------\n' - '\n' - 'Any remaining exceptions that were not handled by any "except*" ' - 'clause\n' - 'are re-raised at the end, along with all exceptions that were raised\n' - 'from within the "except*" clauses. If this list contains more than ' - 'one\n' - 'exception to reraise, they are combined into an exception group.\n' - '\n' - 'If the raised exception is not an exception group and its type ' - 'matches\n' - 'one of the "except*" clauses, it is caught and wrapped by an ' - 'exception\n' - 'group with an empty message string.\n' - '\n' - ' >>> try:\n' - ' ... raise BlockingIOError\n' - ' ... except* BlockingIOError as e:\n' - ' ... print(repr(e))\n' - ' ...\n' - " ExceptionGroup('', (BlockingIOError()))\n" - '\n' - 'An "except*" clause must have a matching expression; it cannot be\n' - '"except*:". Furthermore, this expression cannot contain exception\n' - 'group types, because that would have ambiguous semantics.\n' - '\n' - 'It is not possible to mix "except" and "except*" in the same "try".\n' - '"break", "continue" and "return" cannot appear in an "except*" ' - 'clause.\n' - '\n' - '\n' - '"else" clause\n' - '=============\n' - '\n' - 'The optional "else" clause is executed if the control flow leaves ' - 'the\n' - '"try" suite, no exception was raised, and no "return", "continue", ' - 'or\n' - '"break" statement was executed. Exceptions in the "else" clause are\n' - 'not handled by the preceding "except" clauses.\n' - '\n' - '\n' - '"finally" clause\n' - '================\n' - '\n' - 'If "finally" is present, it specifies a ‘cleanup’ handler. The ' - '"try"\n' - 'clause is executed, including any "except" and "else" clauses. If ' - 'an\n' - 'exception occurs in any of the clauses and is not handled, the\n' - 'exception is temporarily saved. The "finally" clause is executed. ' - 'If\n' - 'there is a saved exception it is re-raised at the end of the ' - '"finally"\n' - 'clause. If the "finally" clause raises another exception, the saved\n' - 'exception is set as the context of the new exception. If the ' - '"finally"\n' - 'clause executes a "return", "break" or "continue" statement, the ' - 'saved\n' - 'exception is discarded:\n' - '\n' - ' >>> def f():\n' - ' ... try:\n' - ' ... 1/0\n' - ' ... finally:\n' - ' ... return 42\n' - ' ...\n' - ' >>> f()\n' - ' 42\n' - '\n' - 'The exception information is not available to the program during\n' - 'execution of the "finally" clause.\n' - '\n' - 'When a "return", "break" or "continue" statement is executed in the\n' - '"try" suite of a "try"…"finally" statement, the "finally" clause is\n' - 'also executed ‘on the way out.’\n' - '\n' - 'The return value of a function is determined by the last "return"\n' - 'statement executed. Since the "finally" clause always executes, a\n' - '"return" statement executed in the "finally" clause will always be ' - 'the\n' - 'last one executed:\n' - '\n' - ' >>> def foo():\n' - ' ... try:\n' - " ... return 'try'\n" - ' ... finally:\n' - " ... 
return 'finally'\n" - ' ...\n' - ' >>> foo()\n' - " 'finally'\n" - '\n' - 'Changed in version 3.8: Prior to Python 3.8, a "continue" statement\n' - 'was illegal in the "finally" clause due to a problem with the\n' - 'implementation.\n', - 'types': 'The standard type hierarchy\n' - '***************************\n' - '\n' - 'Below is a list of the types that are built into Python. ' - 'Extension\n' - 'modules (written in C, Java, or other languages, depending on the\n' - 'implementation) can define additional types. Future versions of\n' - 'Python may add types to the type hierarchy (e.g., rational ' - 'numbers,\n' - 'efficiently stored arrays of integers, etc.), although such ' - 'additions\n' - 'will often be provided via the standard library instead.\n' - '\n' - 'Some of the type descriptions below contain a paragraph listing\n' - '‘special attributes.’ These are attributes that provide access to ' - 'the\n' - 'implementation and are not intended for general use. Their ' - 'definition\n' - 'may change in the future.\n' - '\n' - '\n' - 'None\n' - '====\n' - '\n' - 'This type has a single value. There is a single object with this\n' - 'value. This object is accessed through the built-in name "None". It ' - 'is\n' - 'used to signify the absence of a value in many situations, e.g., it ' - 'is\n' - 'returned from functions that don’t explicitly return anything. Its\n' - 'truth value is false.\n' - '\n' - '\n' - 'NotImplemented\n' - '==============\n' - '\n' - 'This type has a single value. There is a single object with this\n' - 'value. This object is accessed through the built-in name\n' - '"NotImplemented". Numeric methods and rich comparison methods ' - 'should\n' - 'return this value if they do not implement the operation for the\n' - 'operands provided. (The interpreter will then try the reflected\n' - 'operation, or some other fallback, depending on the operator.) It\n' - 'should not be evaluated in a boolean context.\n' - '\n' - 'See Implementing the arithmetic operations for more details.\n' - '\n' - 'Changed in version 3.9: Evaluating "NotImplemented" in a boolean\n' - 'context was deprecated.\n' - '\n' - 'Changed in version 3.14: Evaluating "NotImplemented" in a boolean\n' - 'context now raises a "TypeError". It previously evaluated to ' - '"True"\n' - 'and emitted a "DeprecationWarning" since Python 3.9.\n' - '\n' - '\n' - 'Ellipsis\n' - '========\n' - '\n' - 'This type has a single value. There is a single object with this\n' - 'value. This object is accessed through the literal "..." or the ' - 'built-\n' - 'in name "Ellipsis". Its truth value is true.\n' - '\n' - '\n' - '"numbers.Number"\n' - '================\n' - '\n' - 'These are created by numeric literals and returned as results by\n' - 'arithmetic operators and arithmetic built-in functions. Numeric\n' - 'objects are immutable; once created their value never changes. 
' - 'Python\n' - 'numbers are of course strongly related to mathematical numbers, ' - 'but\n' - 'subject to the limitations of numerical representation in ' - 'computers.\n' - '\n' - 'The string representations of the numeric classes, computed by\n' - '"__repr__()" and "__str__()", have the following properties:\n' - '\n' - '* They are valid numeric literals which, when passed to their ' - 'class\n' - ' constructor, produce an object having the value of the original\n' - ' numeric.\n' - '\n' - '* The representation is in base 10, when possible.\n' - '\n' - '* Leading zeros, possibly excepting a single zero before a decimal\n' - ' point, are not shown.\n' - '\n' - '* Trailing zeros, possibly excepting a single zero after a decimal\n' - ' point, are not shown.\n' - '\n' - '* A sign is shown only when the number is negative.\n' - '\n' - 'Python distinguishes between integers, floating-point numbers, and\n' - 'complex numbers:\n' - '\n' - '\n' - '"numbers.Integral"\n' - '------------------\n' - '\n' - 'These represent elements from the mathematical set of integers\n' - '(positive and negative).\n' - '\n' - 'Note:\n' - '\n' - ' The rules for integer representation are intended to give the ' - 'most\n' - ' meaningful interpretation of shift and mask operations involving\n' - ' negative integers.\n' - '\n' - 'There are two types of integers:\n' - '\n' - 'Integers ("int")\n' - ' These represent numbers in an unlimited range, subject to ' - 'available\n' - ' (virtual) memory only. For the purpose of shift and mask\n' - ' operations, a binary representation is assumed, and negative\n' - ' numbers are represented in a variant of 2’s complement which ' - 'gives\n' - ' the illusion of an infinite string of sign bits extending to ' - 'the\n' - ' left.\n' - '\n' - 'Booleans ("bool")\n' - ' These represent the truth values False and True. The two ' - 'objects\n' - ' representing the values "False" and "True" are the only Boolean\n' - ' objects. The Boolean type is a subtype of the integer type, and\n' - ' Boolean values behave like the values 0 and 1, respectively, in\n' - ' almost all contexts, the exception being that when converted to ' - 'a\n' - ' string, the strings ""False"" or ""True"" are returned,\n' - ' respectively.\n' - '\n' - '\n' - '"numbers.Real" ("float")\n' - '------------------------\n' - '\n' - 'These represent machine-level double precision floating-point ' - 'numbers.\n' - 'You are at the mercy of the underlying machine architecture (and C ' - 'or\n' - 'Java implementation) for the accepted range and handling of ' - 'overflow.\n' - 'Python does not support single-precision floating-point numbers; ' - 'the\n' - 'savings in processor and memory usage that are usually the reason ' - 'for\n' - 'using these are dwarfed by the overhead of using objects in Python, ' - 'so\n' - 'there is no reason to complicate the language with two kinds of\n' - 'floating-point numbers.\n' - '\n' - '\n' - '"numbers.Complex" ("complex")\n' - '-----------------------------\n' - '\n' - 'These represent complex numbers as a pair of machine-level double\n' - 'precision floating-point numbers. The same caveats apply as for\n' - 'floating-point numbers. The real and imaginary parts of a complex\n' - 'number "z" can be retrieved through the read-only attributes ' - '"z.real"\n' - 'and "z.imag".\n' - '\n' - '\n' - 'Sequences\n' - '=========\n' - '\n' - 'These represent finite ordered sets indexed by non-negative ' - 'numbers.\n' - 'The built-in function "len()" returns the number of items of a\n' - 'sequence. 
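(A brief illustrative aside on the numeric types above, with made-up values: Booleans behave like the integers 0 and 1 except for string conversion, and the real and imaginary parts of a complex number are read-only attributes.)

   >>> True + True            # bool is a subtype of int
   2
   >>> str(True), str(False)  # ...except when converted to a string
   ('True', 'False')
   >>> z = 3 - 4j
   >>> z.real, z.imag
   (3.0, -4.0)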
When the length of a sequence is *n*, the index set ' - 'contains\n' - 'the numbers 0, 1, …, *n*-1. Item *i* of sequence *a* is selected ' - 'by\n' - '"a[i]". Some sequences, including built-in sequences, interpret\n' - 'negative subscripts by adding the sequence length. For example,\n' - '"a[-2]" equals "a[n-2]", the second to last item of sequence a ' - 'with\n' - 'length "n".\n' - '\n' - 'Sequences also support slicing: "a[i:j]" selects all items with ' - 'index\n' - '*k* such that *i* "<=" *k* "<" *j*. When used as an expression, a\n' - 'slice is a sequence of the same type. The comment above about ' - 'negative\n' - 'indexes also applies to negative slice positions.\n' - '\n' - 'Some sequences also support “extended slicing†with a third “stepâ€\n' - 'parameter: "a[i:j:k]" selects all items of *a* with index *x* where ' - '"x\n' - '= i + n*k", *n* ">=" "0" and *i* "<=" *x* "<" *j*.\n' - '\n' - 'Sequences are distinguished according to their mutability:\n' - '\n' - '\n' - 'Immutable sequences\n' - '-------------------\n' - '\n' - 'An object of an immutable sequence type cannot change once it is\n' - 'created. (If the object contains references to other objects, ' - 'these\n' - 'other objects may be mutable and may be changed; however, the\n' - 'collection of objects directly referenced by an immutable object\n' - 'cannot change.)\n' - '\n' - 'The following types are immutable sequences:\n' - '\n' - 'Strings\n' - ' A string is a sequence of values that represent Unicode code\n' - ' points. All the code points in the range "U+0000 - U+10FFFF" can ' - 'be\n' - ' represented in a string. Python doesn’t have a char type; ' - 'instead,\n' - ' every code point in the string is represented as a string ' - 'object\n' - ' with length "1". The built-in function "ord()" converts a code\n' - ' point from its string form to an integer in the range "0 - ' - '10FFFF";\n' - ' "chr()" converts an integer in the range "0 - 10FFFF" to the\n' - ' corresponding length "1" string object. "str.encode()" can be ' - 'used\n' - ' to convert a "str" to "bytes" using the given text encoding, ' - 'and\n' - ' "bytes.decode()" can be used to achieve the opposite.\n' - '\n' - 'Tuples\n' - ' The items of a tuple are arbitrary Python objects. Tuples of two ' - 'or\n' - ' more items are formed by comma-separated lists of expressions. ' - 'A\n' - ' tuple of one item (a ‘singleton’) can be formed by affixing a ' - 'comma\n' - ' to an expression (an expression by itself does not create a ' - 'tuple,\n' - ' since parentheses must be usable for grouping of expressions). ' - 'An\n' - ' empty tuple can be formed by an empty pair of parentheses.\n' - '\n' - 'Bytes\n' - ' A bytes object is an immutable array. The items are 8-bit ' - 'bytes,\n' - ' represented by integers in the range 0 <= x < 256. Bytes ' - 'literals\n' - ' (like "b\'abc\'") and the built-in "bytes()" constructor can be ' - 'used\n' - ' to create bytes objects. Also, bytes objects can be decoded to\n' - ' strings via the "decode()" method.\n' - '\n' - '\n' - 'Mutable sequences\n' - '-----------------\n' - '\n' - 'Mutable sequences can be changed after they are created. 
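A small sketch of the indexing and slicing rules described above, using a list (a mutable sequence; the values are illustrative only):

   >>> a = [0, 1, 2, 3, 4, 5]
   >>> a[-2]          # negative index: equivalent to a[len(a) - 2]
   4
   >>> a[1:4]         # items with index k such that 1 <= k < 4
   [1, 2, 3]
   >>> a[0:6:2]       # extended slicing with a step of 2
   [0, 2, 4]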
The\n' - 'subscription and slicing notations can be used as the target of\n' - 'assignment and "del" (delete) statements.\n' - '\n' - 'Note:\n' - '\n' - ' The "collections" and "array" module provide additional examples ' - 'of\n' - ' mutable sequence types.\n' - '\n' - 'There are currently two intrinsic mutable sequence types:\n' - '\n' - 'Lists\n' - ' The items of a list are arbitrary Python objects. Lists are ' - 'formed\n' - ' by placing a comma-separated list of expressions in square\n' - ' brackets. (Note that there are no special cases needed to form\n' - ' lists of length 0 or 1.)\n' - '\n' - 'Byte Arrays\n' - ' A bytearray object is a mutable array. They are created by the\n' - ' built-in "bytearray()" constructor. Aside from being mutable ' - '(and\n' - ' hence unhashable), byte arrays otherwise provide the same ' - 'interface\n' - ' and functionality as immutable "bytes" objects.\n' - '\n' - '\n' - 'Set types\n' - '=========\n' - '\n' - 'These represent unordered, finite sets of unique, immutable ' - 'objects.\n' - 'As such, they cannot be indexed by any subscript. However, they can ' - 'be\n' - 'iterated over, and the built-in function "len()" returns the number ' - 'of\n' - 'items in a set. Common uses for sets are fast membership testing,\n' - 'removing duplicates from a sequence, and computing mathematical\n' - 'operations such as intersection, union, difference, and symmetric\n' - 'difference.\n' - '\n' - 'For set elements, the same immutability rules apply as for ' - 'dictionary\n' - 'keys. Note that numeric types obey the normal rules for numeric\n' - 'comparison: if two numbers compare equal (e.g., "1" and "1.0"), ' - 'only\n' - 'one of them can be contained in a set.\n' - '\n' - 'There are currently two intrinsic set types:\n' - '\n' - 'Sets\n' - ' These represent a mutable set. They are created by the built-in\n' - ' "set()" constructor and can be modified afterwards by several\n' - ' methods, such as "add()".\n' - '\n' - 'Frozen sets\n' - ' These represent an immutable set. They are created by the ' - 'built-in\n' - ' "frozenset()" constructor. As a frozenset is immutable and\n' - ' *hashable*, it can be used again as an element of another set, ' - 'or\n' - ' as a dictionary key.\n' - '\n' - '\n' - 'Mappings\n' - '========\n' - '\n' - 'These represent finite sets of objects indexed by arbitrary index\n' - 'sets. The subscript notation "a[k]" selects the item indexed by ' - '"k"\n' - 'from the mapping "a"; this can be used in expressions and as the\n' - 'target of assignments or "del" statements. The built-in function\n' - '"len()" returns the number of items in a mapping.\n' - '\n' - 'There is currently a single intrinsic mapping type:\n' - '\n' - '\n' - 'Dictionaries\n' - '------------\n' - '\n' - 'These represent finite sets of objects indexed by nearly arbitrary\n' - 'values. The only types of values not acceptable as keys are ' - 'values\n' - 'containing lists or dictionaries or other mutable types that are\n' - 'compared by value rather than by object identity, the reason being\n' - 'that the efficient implementation of dictionaries requires a key’s\n' - 'hash value to remain constant. 
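The key restriction just described can be sketched briefly: a mutable container cannot be used as a dictionary key, while its immutable counterpart can (the values below are illustrative only).

   >>> d = {}
   >>> d[frozenset({1, 2})] = 'immutable keys are fine'
   >>> d[[1, 2]] = 'mutable keys are not'
   Traceback (most recent call last):
     ...
   TypeError: unhashable type: 'list'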
Numeric types used for keys obey ' - 'the\n' - 'normal rules for numeric comparison: if two numbers compare equal\n' - '(e.g., "1" and "1.0") then they can be used interchangeably to ' - 'index\n' - 'the same dictionary entry.\n' - '\n' - 'Dictionaries preserve insertion order, meaning that keys will be\n' - 'produced in the same order they were added sequentially over the\n' - 'dictionary. Replacing an existing key does not change the order,\n' - 'however removing a key and re-inserting it will add it to the end\n' - 'instead of keeping its old place.\n' - '\n' - 'Dictionaries are mutable; they can be created by the "{}" notation\n' - '(see section Dictionary displays).\n' - '\n' - 'The extension modules "dbm.ndbm" and "dbm.gnu" provide additional\n' - 'examples of mapping types, as does the "collections" module.\n' - '\n' - 'Changed in version 3.7: Dictionaries did not preserve insertion ' - 'order\n' - 'in versions of Python before 3.6. In CPython 3.6, insertion order ' - 'was\n' - 'preserved, but it was considered an implementation detail at that ' - 'time\n' - 'rather than a language guarantee.\n' - '\n' - '\n' - 'Callable types\n' - '==============\n' - '\n' - 'These are the types to which the function call operation (see ' - 'section\n' - 'Calls) can be applied:\n' - '\n' - '\n' - 'User-defined functions\n' - '----------------------\n' - '\n' - 'A user-defined function object is created by a function definition\n' - '(see section Function definitions). It should be called with an\n' - 'argument list containing the same number of items as the ' - 'function’s\n' - 'formal parameter list.\n' - '\n' - '\n' - 'Special read-only attributes\n' - '~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n' - '\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| Attribute | ' - 'Meaning |\n' - '|====================================================|====================================================|\n' - '| function.__globals__ | A reference ' - 'to the "dictionary" that holds the |\n' - '| | function’s ' - 'global variables – the global namespace |\n' - '| | of the ' - 'module in which the function was defined. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| function.__closure__ | "None" or a ' - '"tuple" of cells that contain bindings |\n' - '| | for the ' - 'names specified in the "co_freevars" |\n' - '| | attribute of ' - 'the function’s "code object". A cell |\n' - '| | object has ' - 'the attribute "cell_contents". This can |\n' - '| | be used to ' - 'get the value of the cell, as well as |\n' - '| | set the ' - 'value. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '\n' - '\n' - 'Special writable attributes\n' - '~~~~~~~~~~~~~~~~~~~~~~~~~~~\n' - '\n' - 'Most of these attributes check the type of the assigned value:\n' - '\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| Attribute | ' - 'Meaning |\n' - '|====================================================|====================================================|\n' - '| function.__doc__ | The ' - 'function’s documentation string, or "None" if |\n' - '| | ' - 'unavailable. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| function.__name__ | The ' - 'function’s name. 
See also: "__name__ |\n' - '| | ' - 'attributes". |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| function.__qualname__ | The ' - 'function’s *qualified name*. See also: |\n' - '| | ' - '"__qualname__ attributes". Added in version 3.3. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| function.__module__ | The name of ' - 'the module the function was defined |\n' - '| | in, or ' - '"None" if unavailable. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| function.__defaults__ | A "tuple" ' - 'containing default *parameter* values |\n' - '| | for those ' - 'parameters that have defaults, or "None" |\n' - '| | if no ' - 'parameters have a default value. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| function.__code__ | The code ' - 'object representing the compiled function |\n' - '| | ' - 'body. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| function.__dict__ | The ' - 'namespace supporting arbitrary function |\n' - '| | attributes. ' - 'See also: "__dict__ attributes". |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| function.__annotations__ | A ' - '"dictionary" containing annotations of |\n' - '| | ' - '*parameters*. The keys of the dictionary are the |\n' - '| | parameter ' - 'names, and "\'return\'" for the return |\n' - '| | annotation, ' - 'if provided. See also: |\n' - '| | ' - '"object.__annotations__". Changed in version |\n' - '| | 3.14: ' - 'Annotations are now lazily evaluated. See |\n' - '| | **PEP ' - '649**. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| function.__annotate__ | The ' - '*annotate function* for this function, or |\n' - '| | "None" if ' - 'the function has no annotations. See |\n' - '| | ' - '"object.__annotate__". Added in version 3.14. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| function.__kwdefaults__ | A ' - '"dictionary" containing defaults for keyword- |\n' - '| | only ' - '*parameters*. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| function.__type_params__ | A "tuple" ' - 'containing the type parameters of a |\n' - '| | generic ' - 'function. Added in version 3.12. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '\n' - 'Function objects also support getting and setting arbitrary\n' - 'attributes, which can be used, for example, to attach metadata to\n' - 'functions. Regular attribute dot-notation is used to get and set ' - 'such\n' - 'attributes.\n' - '\n' - '**CPython implementation detail:** CPython’s current ' - 'implementation\n' - 'only supports function attributes on user-defined functions. 
' - 'Function\n' - 'attributes on built-in functions may be supported in the future.\n' - '\n' - 'Additional information about a function’s definition can be ' - 'retrieved\n' - 'from its code object (accessible via the "__code__" attribute).\n' - '\n' - '\n' - 'Instance methods\n' - '----------------\n' - '\n' - 'An instance method object combines a class, a class instance and ' - 'any\n' - 'callable object (normally a user-defined function).\n' - '\n' - 'Special read-only attributes:\n' - '\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| method.__self__ | Refers to ' - 'the class instance object to which the |\n' - '| | method is ' - 'bound |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| method.__func__ | Refers to ' - 'the original function object |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| method.__doc__ | The method’s ' - 'documentation (same as |\n' - '| | ' - '"method.__func__.__doc__"). A "string" if the |\n' - '| | original ' - 'function had a docstring, else "None". |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| method.__name__ | The name of ' - 'the method (same as |\n' - '| | ' - '"method.__func__.__name__") |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| method.__module__ | The name of ' - 'the module the method was defined in, |\n' - '| | or "None" if ' - 'unavailable. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '\n' - 'Methods also support accessing (but not setting) the arbitrary\n' - 'function attributes on the underlying function object.\n' - '\n' - 'User-defined method objects may be created when getting an ' - 'attribute\n' - 'of a class (perhaps via an instance of that class), if that ' - 'attribute\n' - 'is a user-defined function object or a "classmethod" object.\n' - '\n' - 'When an instance method object is created by retrieving a ' - 'user-defined\n' - 'function object from a class via one of its instances, its ' - '"__self__"\n' - 'attribute is the instance, and the method object is said to be\n' - '*bound*. The new method’s "__func__" attribute is the original\n' - 'function object.\n' - '\n' - 'When an instance method object is created by retrieving a\n' - '"classmethod" object from a class or instance, its "__self__"\n' - 'attribute is the class itself, and its "__func__" attribute is the\n' - 'function object underlying the class method.\n' - '\n' - 'When an instance method object is called, the underlying function\n' - '("__func__") is called, inserting the class instance ("__self__") ' - 'in\n' - 'front of the argument list. 
For instance, when "C" is a class ' - 'which\n' - 'contains a definition for a function "f()", and "x" is an instance ' - 'of\n' - '"C", calling "x.f(1)" is equivalent to calling "C.f(x, 1)".\n' - '\n' - 'When an instance method object is derived from a "classmethod" ' - 'object,\n' - 'the “class instance†stored in "__self__" will actually be the ' - 'class\n' - 'itself, so that calling either "x.f(1)" or "C.f(1)" is equivalent ' - 'to\n' - 'calling "f(C,1)" where "f" is the underlying function.\n' - '\n' - 'It is important to note that user-defined functions which are\n' - 'attributes of a class instance are not converted to bound methods;\n' - 'this *only* happens when the function is an attribute of the ' - 'class.\n' - '\n' - '\n' - 'Generator functions\n' - '-------------------\n' - '\n' - 'A function or method which uses the "yield" statement (see section ' - 'The\n' - 'yield statement) is called a *generator function*. Such a ' - 'function,\n' - 'when called, always returns an *iterator* object which can be used ' - 'to\n' - 'execute the body of the function: calling the iterator’s\n' - '"iterator.__next__()" method will cause the function to execute ' - 'until\n' - 'it provides a value using the "yield" statement. When the ' - 'function\n' - 'executes a "return" statement or falls off the end, a ' - '"StopIteration"\n' - 'exception is raised and the iterator will have reached the end of ' - 'the\n' - 'set of values to be returned.\n' - '\n' - '\n' - 'Coroutine functions\n' - '-------------------\n' - '\n' - 'A function or method which is defined using "async def" is called ' - 'a\n' - '*coroutine function*. Such a function, when called, returns a\n' - '*coroutine* object. It may contain "await" expressions, as well ' - 'as\n' - '"async with" and "async for" statements. See also the Coroutine\n' - 'Objects section.\n' - '\n' - '\n' - 'Asynchronous generator functions\n' - '--------------------------------\n' - '\n' - 'A function or method which is defined using "async def" and which ' - 'uses\n' - 'the "yield" statement is called a *asynchronous generator ' - 'function*.\n' - 'Such a function, when called, returns an *asynchronous iterator*\n' - 'object which can be used in an "async for" statement to execute ' - 'the\n' - 'body of the function.\n' - '\n' - 'Calling the asynchronous iterator’s "aiterator.__anext__" method ' - 'will\n' - 'return an *awaitable* which when awaited will execute until it\n' - 'provides a value using the "yield" expression. When the function\n' - 'executes an empty "return" statement or falls off the end, a\n' - '"StopAsyncIteration" exception is raised and the asynchronous ' - 'iterator\n' - 'will have reached the end of the set of values to be yielded.\n' - '\n' - '\n' - 'Built-in functions\n' - '------------------\n' - '\n' - 'A built-in function object is a wrapper around a C function. ' - 'Examples\n' - 'of built-in functions are "len()" and "math.sin()" ("math" is a\n' - 'standard built-in module). The number and type of the arguments ' - 'are\n' - 'determined by the C function. Special read-only attributes:\n' - '\n' - '* "__doc__" is the function’s documentation string, or "None" if\n' - ' unavailable. See "function.__doc__".\n' - '\n' - '* "__name__" is the function’s name. See "function.__name__".\n' - '\n' - '* "__self__" is set to "None" (but see the next item).\n' - '\n' - '* "__module__" is the name of the module the function was defined ' - 'in\n' - ' or "None" if unavailable. 
See "function.__module__".\n' - '\n' - '\n' - 'Built-in methods\n' - '----------------\n' - '\n' - 'This is really a different disguise of a built-in function, this ' - 'time\n' - 'containing an object passed to the C function as an implicit extra\n' - 'argument. An example of a built-in method is "alist.append()",\n' - 'assuming *alist* is a list object. In this case, the special ' - 'read-only\n' - 'attribute "__self__" is set to the object denoted by *alist*. (The\n' - 'attribute has the same semantics as it does with "other instance\n' - 'methods".)\n' - '\n' - '\n' - 'Classes\n' - '-------\n' - '\n' - 'Classes are callable. These objects normally act as factories for ' - 'new\n' - 'instances of themselves, but variations are possible for class ' - 'types\n' - 'that override "__new__()". The arguments of the call are passed ' - 'to\n' - '"__new__()" and, in the typical case, to "__init__()" to ' - 'initialize\n' - 'the new instance.\n' - '\n' - '\n' - 'Class Instances\n' - '---------------\n' - '\n' - 'Instances of arbitrary classes can be made callable by defining a\n' - '"__call__()" method in their class.\n' - '\n' - '\n' - 'Modules\n' - '=======\n' - '\n' - 'Modules are a basic organizational unit of Python code, and are\n' - 'created by the import system as invoked either by the "import"\n' - 'statement, or by calling functions such as ' - '"importlib.import_module()"\n' - 'and built-in "__import__()". A module object has a namespace\n' - 'implemented by a "dictionary" object (this is the dictionary\n' - 'referenced by the "__globals__" attribute of functions defined in ' - 'the\n' - 'module). Attribute references are translated to lookups in this\n' - 'dictionary, e.g., "m.x" is equivalent to "m.__dict__["x"]". A ' - 'module\n' - 'object does not contain the code object used to initialize the ' - 'module\n' - '(since it isn’t needed once the initialization is done).\n' - '\n' - 'Attribute assignment updates the module’s namespace dictionary, ' - 'e.g.,\n' - '"m.x = 1" is equivalent to "m.__dict__["x"] = 1".\n' - '\n' - '\n' - 'Import-related attributes on module objects\n' - '-------------------------------------------\n' - '\n' - 'Module objects have the following attributes that relate to the ' - 'import\n' - 'system. When a module is created using the machinery associated ' - 'with\n' - 'the import system, these attributes are filled in based on the\n' - 'module’s *spec*, before the *loader* executes and loads the ' - 'module.\n' - '\n' - 'To create a module dynamically rather than using the import ' - 'system,\n' - 'it’s recommended to use "importlib.util.module_from_spec()", which\n' - 'will set the various import-controlled attributes to appropriate\n' - 'values. It’s also possible to use the "types.ModuleType" ' - 'constructor\n' - 'to create modules directly, but this technique is more error-prone, ' - 'as\n' - 'most attributes must be manually set on the module object after it ' - 'has\n' - 'been created when using this approach.\n' - '\n' - 'Caution:\n' - '\n' - ' With the exception of "__name__", it is **strongly** recommended\n' - ' that you rely on "__spec__" and its attributes instead of any of ' - 'the\n' - ' other individual attributes listed in this subsection. 
Note that\n' - ' updating an attribute on "__spec__" will not update the\n' - ' corresponding attribute on the module itself:\n' - '\n' - ' >>> import typing\n' - ' >>> typing.__name__, typing.__spec__.name\n' - " ('typing', 'typing')\n" - " >>> typing.__spec__.name = 'spelling'\n" - ' >>> typing.__name__, typing.__spec__.name\n' - " ('typing', 'spelling')\n" - " >>> typing.__name__ = 'keyboard_smashing'\n" - ' >>> typing.__name__, typing.__spec__.name\n' - " ('keyboard_smashing', 'spelling')\n" - '\n' - 'module.__name__\n' - '\n' - ' The name used to uniquely identify the module in the import ' - 'system.\n' - ' For a directly executed module, this will be set to ' - '""__main__"".\n' - '\n' - ' This attribute must be set to the fully qualified name of the\n' - ' module. It is expected to match the value of\n' - ' "module.__spec__.name".\n' - '\n' - 'module.__spec__\n' - '\n' - ' A record of the module’s import-system-related state.\n' - '\n' - ' Set to the "module spec" that was used when importing the ' - 'module.\n' - ' See Module specs for more details.\n' - '\n' - ' Added in version 3.4.\n' - '\n' - 'module.__package__\n' - '\n' - ' The *package* a module belongs to.\n' - '\n' - ' If the module is top-level (that is, not a part of any specific\n' - ' package) then the attribute should be set to "\'\'" (the empty\n' - ' string). Otherwise, it should be set to the name of the ' - 'module’s\n' - ' package (which can be equal to "module.__name__" if the module\n' - ' itself is a package). See **PEP 366** for further details.\n' - '\n' - ' This attribute is used instead of "__name__" to calculate ' - 'explicit\n' - ' relative imports for main modules. It defaults to "None" for\n' - ' modules created dynamically using the "types.ModuleType"\n' - ' constructor; use "importlib.util.module_from_spec()" instead to\n' - ' ensure the attribute is set to a "str".\n' - '\n' - ' It is **strongly** recommended that you use\n' - ' "module.__spec__.parent" instead of "module.__package__".\n' - ' "__package__" is now only used as a fallback if ' - '"__spec__.parent"\n' - ' is not set, and this fallback path is deprecated.\n' - '\n' - ' Changed in version 3.4: This attribute now defaults to "None" ' - 'for\n' - ' modules created dynamically using the "types.ModuleType"\n' - ' constructor. Previously the attribute was optional.\n' - '\n' - ' Changed in version 3.6: The value of "__package__" is expected ' - 'to\n' - ' be the same as "__spec__.parent". 
"__package__" is now only used ' - 'as\n' - ' a fallback during import resolution if "__spec__.parent" is not\n' - ' defined.\n' - '\n' - ' Changed in version 3.10: "ImportWarning" is raised if an import\n' - ' resolution falls back to "__package__" instead of\n' - ' "__spec__.parent".\n' - '\n' - ' Changed in version 3.12: Raise "DeprecationWarning" instead of\n' - ' "ImportWarning" when falling back to "__package__" during ' - 'import\n' - ' resolution.\n' - '\n' - ' Deprecated since version 3.13, will be removed in version 3.15:\n' - ' "__package__" will cease to be set or taken into consideration ' - 'by\n' - ' the import system or standard library.\n' - '\n' - 'module.__loader__\n' - '\n' - ' The *loader* object that the import machinery used to load the\n' - ' module.\n' - '\n' - ' This attribute is mostly useful for introspection, but can be ' - 'used\n' - ' for additional loader-specific functionality, for example ' - 'getting\n' - ' data associated with a loader.\n' - '\n' - ' "__loader__" defaults to "None" for modules created dynamically\n' - ' using the "types.ModuleType" constructor; use\n' - ' "importlib.util.module_from_spec()" instead to ensure the ' - 'attribute\n' - ' is set to a *loader* object.\n' - '\n' - ' It is **strongly** recommended that you use\n' - ' "module.__spec__.loader" instead of "module.__loader__".\n' - '\n' - ' Changed in version 3.4: This attribute now defaults to "None" ' - 'for\n' - ' modules created dynamically using the "types.ModuleType"\n' - ' constructor. Previously the attribute was optional.\n' - '\n' - ' Deprecated since version 3.12, will be removed in version 3.16:\n' - ' Setting "__loader__" on a module while failing to set\n' - ' "__spec__.loader" is deprecated. In Python 3.16, "__loader__" ' - 'will\n' - ' cease to be set or taken into consideration by the import system ' - 'or\n' - ' the standard library.\n' - '\n' - 'module.__path__\n' - '\n' - ' A (possibly empty) *sequence* of strings enumerating the ' - 'locations\n' - ' where the package’s submodules will be found. Non-package ' - 'modules\n' - ' should not have a "__path__" attribute. See __path__ attributes ' - 'on\n' - ' modules for more details.\n' - '\n' - ' It is **strongly** recommended that you use\n' - ' "module.__spec__.submodule_search_locations" instead of\n' - ' "module.__path__".\n' - '\n' - 'module.__file__\n' - '\n' - 'module.__cached__\n' - '\n' - ' "__file__" and "__cached__" are both optional attributes that ' - 'may\n' - ' or may not be set. Both attributes should be a "str" when they ' - 'are\n' - ' available.\n' - '\n' - ' "__file__" indicates the pathname of the file from which the ' - 'module\n' - ' was loaded (if loaded from a file), or the pathname of the ' - 'shared\n' - ' library file for extension modules loaded dynamically from a ' - 'shared\n' - ' library. It might be missing for certain types of modules, such ' - 'as\n' - ' C modules that are statically linked into the interpreter, and ' - 'the\n' - ' import system may opt to leave it unset if it has no semantic\n' - ' meaning (for example, a module loaded from a database).\n' - '\n' - ' If "__file__" is set then the "__cached__" attribute might also ' - 'be\n' - ' set, which is the path to any compiled version of the code ' - '(for\n' - ' example, a byte-compiled file). 
The file does not need to exist ' - 'to\n' - ' set this attribute; the path can simply point to where the ' - 'compiled\n' - ' file *would* exist (see **PEP 3147**).\n' - '\n' - ' Note that "__cached__" may be set even if "__file__" is not ' - 'set.\n' - ' However, that scenario is quite atypical. Ultimately, the ' - '*loader*\n' - ' is what makes use of the module spec provided by the *finder* ' - '(from\n' - ' which "__file__" and "__cached__" are derived). So if a loader ' - 'can\n' - ' load from a cached module but otherwise does not load from a ' - 'file,\n' - ' that atypical scenario may be appropriate.\n' - '\n' - ' It is **strongly** recommended that you use\n' - ' "module.__spec__.cached" instead of "module.__cached__".\n' - '\n' - ' Deprecated since version 3.13, will be removed in version 3.15:\n' - ' Setting "__cached__" on a module while failing to set\n' - ' "__spec__.cached" is deprecated. In Python 3.15, "__cached__" ' - 'will\n' - ' cease to be set or taken into consideration by the import system ' - 'or\n' - ' standard library.\n' - '\n' - '\n' - 'Other writable attributes on module objects\n' - '-------------------------------------------\n' - '\n' - 'As well as the import-related attributes listed above, module ' - 'objects\n' - 'also have the following writable attributes:\n' - '\n' - 'module.__doc__\n' - '\n' - ' The module’s documentation string, or "None" if unavailable. ' - 'See\n' - ' also: "__doc__ attributes".\n' - '\n' - 'module.__annotations__\n' - '\n' - ' A dictionary containing *variable annotations* collected during\n' - ' module body execution. For best practices on working with\n' - ' "__annotations__", see "annotationlib".\n' - '\n' - ' Changed in version 3.14: Annotations are now lazily evaluated. ' - 'See\n' - ' **PEP 649**.\n' - '\n' - 'module.__annotate__\n' - '\n' - ' The *annotate function* for this module, or "None" if the ' - 'module\n' - ' has no annotations. See also: "__annotate__" attributes.\n' - '\n' - ' Added in version 3.14.\n' - '\n' - '\n' - 'Module dictionaries\n' - '-------------------\n' - '\n' - 'Module objects also have the following special read-only ' - 'attribute:\n' - '\n' - 'module.__dict__\n' - '\n' - ' The module’s namespace as a dictionary object. Uniquely among ' - 'the\n' - ' attributes listed here, "__dict__" cannot be accessed as a ' - 'global\n' - ' variable from within a module; it can only be accessed as an\n' - ' attribute on module objects.\n' - '\n' - ' **CPython implementation detail:** Because of the way CPython\n' - ' clears module dictionaries, the module dictionary will be ' - 'cleared\n' - ' when the module falls out of scope even if the dictionary still ' - 'has\n' - ' live references. To avoid this, copy the dictionary or keep ' - 'the\n' - ' module around while using its dictionary directly.\n' - '\n' - '\n' - 'Custom classes\n' - '==============\n' - '\n' - 'Custom class types are typically created by class definitions (see\n' - 'section Class definitions). A class has a namespace implemented by ' - 'a\n' - 'dictionary object. Class attribute references are translated to\n' - 'lookups in this dictionary, e.g., "C.x" is translated to\n' - '"C.__dict__["x"]" (although there are a number of hooks which ' - 'allow\n' - 'for other means of locating attributes). 
When the attribute name ' - 'is\n' - 'not found there, the attribute search continues in the base ' - 'classes.\n' - 'This search of the base classes uses the C3 method resolution ' - 'order\n' - 'which behaves correctly even in the presence of ‘diamond’ ' - 'inheritance\n' - 'structures where there are multiple inheritance paths leading back ' - 'to\n' - 'a common ancestor. Additional details on the C3 MRO used by Python ' - 'can\n' - 'be found at The Python 2.3 Method Resolution Order.\n' - '\n' - 'When a class attribute reference (for class "C", say) would yield ' - 'a\n' - 'class method object, it is transformed into an instance method ' - 'object\n' - 'whose "__self__" attribute is "C". When it would yield a\n' - '"staticmethod" object, it is transformed into the object wrapped ' - 'by\n' - 'the static method object. See section Implementing Descriptors for\n' - 'another way in which attributes retrieved from a class may differ ' - 'from\n' - 'those actually contained in its "__dict__".\n' - '\n' - 'Class attribute assignments update the class’s dictionary, never ' - 'the\n' - 'dictionary of a base class.\n' - '\n' - 'A class object can be called (see above) to yield a class instance\n' - '(see below).\n' - '\n' - '\n' - 'Special attributes\n' - '------------------\n' - '\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| Attribute | ' - 'Meaning |\n' - '|====================================================|====================================================|\n' - '| type.__name__ | The class’s ' - 'name. See also: "__name__ attributes". |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| type.__qualname__ | The class’s ' - '*qualified name*. See also: |\n' - '| | ' - '"__qualname__ attributes". |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| type.__module__ | The name of ' - 'the module in which the class was |\n' - '| | ' - 'defined. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| type.__dict__ | A "mapping ' - 'proxy" providing a read-only view of |\n' - '| | the class’s ' - 'namespace. See also: "__dict__ |\n' - '| | ' - 'attributes". |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| type.__bases__ | A "tuple" ' - 'containing the class’s bases. In most |\n' - '| | cases, for a ' - 'class defined as "class X(A, B, C)", |\n' - '| | ' - '"X.__bases__" will be exactly equal to "(A, B, |\n' - '| | ' - 'C)". |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| type.__doc__ | The class’s ' - 'documentation string, or "None" if |\n' - '| | undefined. ' - 'Not inherited by subclasses. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| type.__annotations__ | A dictionary ' - 'containing *variable annotations* |\n' - '| | collected ' - 'during class body execution. See also: |\n' - '| | ' - '"__annotations__ attributes". For best practices |\n' - '| | on working ' - 'with "__annotations__", please see |\n' - '| | ' - '"annotationlib". 
Caution: Accessing the |\n' - '| | ' - '"__annotations__" attribute of a class object |\n' - '| | directly may ' - 'yield incorrect results in the |\n' - '| | presence of ' - 'metaclasses. In addition, the |\n' - '| | attribute ' - 'may not exist for some classes. Use |\n' - '| | ' - '"annotationlib.get_annotations()" to retrieve |\n' - '| | class ' - 'annotations safely. Changed in version |\n' - '| | 3.14: ' - 'Annotations are now lazily evaluated. See |\n' - '| | **PEP ' - '649**. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| type.__annotate__() | The ' - '*annotate function* for this class, or "None" |\n' - '| | if the class ' - 'has no annotations. See also: |\n' - '| | ' - '"__annotate__ attributes". Caution: Accessing |\n' - '| | the ' - '"__annotate__" attribute of a class object |\n' - '| | directly may ' - 'yield incorrect results in the |\n' - '| | presence of ' - 'metaclasses. Use |\n' - '| | ' - '"annotationlib.get_annotate_function()" to |\n' - '| | retrieve the ' - 'annotate function safely. Added in |\n' - '| | version ' - '3.14. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| type.__type_params__ | A "tuple" ' - 'containing the type parameters of a |\n' - '| | generic ' - 'class. Added in version 3.12. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| type.__static_attributes__ | A "tuple" ' - 'containing names of attributes of this |\n' - '| | class which ' - 'are assigned through "self.X" from any |\n' - '| | function in ' - 'its body. Added in version 3.13. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| type.__firstlineno__ | The line ' - 'number of the first line of the class |\n' - '| | definition, ' - 'including decorators. Setting the |\n' - '| | "__module__" ' - 'attribute removes the |\n' - '| | ' - '"__firstlineno__" item from the type’s dictionary. |\n' - '| | Added in ' - 'version 3.13. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| type.__mro__ | The "tuple" ' - 'of classes that are considered when |\n' - '| | looking for ' - 'base classes during method resolution. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '\n' - '\n' - 'Special methods\n' - '---------------\n' - '\n' - 'In addition to the special attributes described above, all Python\n' - 'classes also have the following two methods available:\n' - '\n' - 'type.mro()\n' - '\n' - ' This method can be overridden by a metaclass to customize the\n' - ' method resolution order for its instances. It is called at ' - 'class\n' - ' instantiation, and its result is stored in "__mro__".\n' - '\n' - 'type.__subclasses__()\n' - '\n' - ' Each class keeps a list of weak references to its immediate\n' - ' subclasses. This method returns a list of all those references\n' - ' still alive. The list is in definition order. Example:\n' - '\n' - ' >>> class A: pass\n' - ' >>> class B(A): pass\n' - ' >>> A.__subclasses__()\n' - " []\n" - '\n' - '\n' - 'Class instances\n' - '===============\n' - '\n' - 'A class instance is created by calling a class object (see above). 
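A companion sketch to the "__subclasses__()" example above, showing "type.mro()" and the "__mro__" attribute it populates (class names are illustrative):

   >>> class A: pass
   >>> class B(A): pass
   >>> B.__mro__ == (B, A, object)
   True
   >>> B.mro() == list(B.__mro__)
   True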
' - 'A\n' - 'class instance has a namespace implemented as a dictionary which ' - 'is\n' - 'the first place in which attribute references are searched. When ' - 'an\n' - 'attribute is not found there, and the instance’s class has an\n' - 'attribute by that name, the search continues with the class\n' - 'attributes. If a class attribute is found that is a user-defined\n' - 'function object, it is transformed into an instance method object\n' - 'whose "__self__" attribute is the instance. Static method and ' - 'class\n' - 'method objects are also transformed; see above under “Classesâ€. ' - 'See\n' - 'section Implementing Descriptors for another way in which ' - 'attributes\n' - 'of a class retrieved via its instances may differ from the objects\n' - 'actually stored in the class’s "__dict__". If no class attribute ' - 'is\n' - 'found, and the object’s class has a "__getattr__()" method, that ' - 'is\n' - 'called to satisfy the lookup.\n' - '\n' - 'Attribute assignments and deletions update the instance’s ' - 'dictionary,\n' - 'never a class’s dictionary. If the class has a "__setattr__()" or\n' - '"__delattr__()" method, this is called instead of updating the\n' - 'instance dictionary directly.\n' - '\n' - 'Class instances can pretend to be numbers, sequences, or mappings ' - 'if\n' - 'they have methods with certain special names. See section Special\n' - 'method names.\n' - '\n' - '\n' - 'Special attributes\n' - '------------------\n' - '\n' - 'object.__class__\n' - '\n' - ' The class to which a class instance belongs.\n' - '\n' - 'object.__dict__\n' - '\n' - ' A dictionary or other mapping object used to store an object’s\n' - ' (writable) attributes. Not all instances have a "__dict__"\n' - ' attribute; see the section on __slots__ for more details.\n' - '\n' - '\n' - 'I/O objects (also known as file objects)\n' - '========================================\n' - '\n' - 'A *file object* represents an open file. Various shortcuts are\n' - 'available to create file objects: the "open()" built-in function, ' - 'and\n' - 'also "os.popen()", "os.fdopen()", and the "makefile()" method of\n' - 'socket objects (and perhaps by other functions or methods provided ' - 'by\n' - 'extension modules).\n' - '\n' - 'The objects "sys.stdin", "sys.stdout" and "sys.stderr" are ' - 'initialized\n' - 'to file objects corresponding to the interpreter’s standard input,\n' - 'output and error streams; they are all open in text mode and ' - 'therefore\n' - 'follow the interface defined by the "io.TextIOBase" abstract ' - 'class.\n' - '\n' - '\n' - 'Internal types\n' - '==============\n' - '\n' - 'A few types used internally by the interpreter are exposed to the\n' - 'user. Their definitions may change with future versions of the\n' - 'interpreter, but they are mentioned here for completeness.\n' - '\n' - '\n' - 'Code objects\n' - '------------\n' - '\n' - 'Code objects represent *byte-compiled* executable Python code, or\n' - '*bytecode*. The difference between a code object and a function ' - 'object\n' - 'is that the function object contains an explicit reference to the\n' - 'function’s globals (the module in which it was defined), while a ' - 'code\n' - 'object contains no context; also the default argument values are\n' - 'stored in the function object, not in the code object (because ' - 'they\n' - 'represent values calculated at run-time). 
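A minimal sketch of the distinction just drawn: default values live on the function object, not on its code object (the function below is made up for illustration).

   >>> def f(x, y=10):
   ...     return x + y
   ...
   >>> f.__defaults__                 # defaults are stored on the function object
   (10,)
   >>> 10 in f.__code__.co_consts     # ...and not among the code object's constants
   False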
Unlike function ' - 'objects,\n' - 'code objects are immutable and contain no references (directly or\n' - 'indirectly) to mutable objects.\n' - '\n' - '\n' - 'Special read-only attributes\n' - '~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n' - '\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| codeobject.co_name | The function ' - 'name |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| codeobject.co_qualname | The fully ' - 'qualified function name Added in |\n' - '| | version ' - '3.11. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| codeobject.co_argcount | The total ' - 'number of positional *parameters* |\n' - '| | (including ' - 'positional-only parameters and |\n' - '| | parameters ' - 'with default values) that the function |\n' - '| | ' - 'has |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| codeobject.co_posonlyargcount | The number ' - 'of positional-only *parameters* |\n' - '| | (including ' - 'arguments with default values) that the |\n' - '| | function ' - 'has |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| codeobject.co_kwonlyargcount | The number ' - 'of keyword-only *parameters* (including |\n' - '| | arguments ' - 'with default values) that the function |\n' - '| | ' - 'has |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| codeobject.co_nlocals | The number ' - 'of local variables used by the function |\n' - '| | (including ' - 'parameters) |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| codeobject.co_varnames | A "tuple" ' - 'containing the names of the local |\n' - '| | variables in ' - 'the function (starting with the |\n' - '| | parameter ' - 'names) |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| codeobject.co_cellvars | A "tuple" ' - 'containing the names of local variables |\n' - '| | that are ' - 'referenced from at least one *nested |\n' - '| | scope* ' - 'inside the function |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| codeobject.co_freevars | A "tuple" ' - 'containing the names of *free (closure) |\n' - '| | variables* ' - 'that a *nested scope* references in an |\n' - '| | outer scope. ' - 'See also "function.__closure__". |\n' - '| | Note: ' - 'references to global and builtin names are |\n' - '| | *not* ' - 'included. 
|\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| codeobject.co_code | A string ' - 'representing the sequence of *bytecode* |\n' - '| | instructions ' - 'in the function |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| codeobject.co_consts | A "tuple" ' - 'containing the literals used by the |\n' - '| | *bytecode* ' - 'in the function |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| codeobject.co_names | A "tuple" ' - 'containing the names used by the |\n' - '| | *bytecode* ' - 'in the function |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| codeobject.co_filename | The name of ' - 'the file from which the code was |\n' - '| | ' - 'compiled |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| codeobject.co_firstlineno | The line ' - 'number of the first line of the function |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| codeobject.co_lnotab | A string ' - 'encoding the mapping from *bytecode* |\n' - '| | offsets to ' - 'line numbers. For details, see the |\n' - '| | source code ' - 'of the interpreter. Deprecated since |\n' - '| | version ' - '3.12: This attribute of code objects is |\n' - '| | deprecated, ' - 'and may be removed in Python 3.15. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| codeobject.co_stacksize | The required ' - 'stack size of the code object |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| codeobject.co_flags | An "integer" ' - 'encoding a number of flags for the |\n' - '| | ' - 'interpreter. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '\n' - 'The following flag bits are defined for "co_flags": bit "0x04" is ' - 'set\n' - 'if the function uses the "*arguments" syntax to accept an ' - 'arbitrary\n' - 'number of positional arguments; bit "0x08" is set if the function ' - 'uses\n' - 'the "**keywords" syntax to accept arbitrary keyword arguments; bit\n' - '"0x20" is set if the function is a generator. 
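The generator bit just mentioned can be checked directly (the functions below are illustrative):

   >>> def gen():
   ...     yield 1
   ...
   >>> def plain():
   ...     return 1
   ...
   >>> bool(gen.__code__.co_flags & 0x20)      # bit 0x20: compiled as a generator
   True
   >>> bool(plain.__code__.co_flags & 0x20)
   False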
See Code Objects Bit\n' - 'Flags for details on the semantics of each flags that might be\n' - 'present.\n' - '\n' - 'Future feature declarations ("from __future__ import division") ' - 'also\n' - 'use bits in "co_flags" to indicate whether a code object was ' - 'compiled\n' - 'with a particular feature enabled: bit "0x2000" is set if the ' - 'function\n' - 'was compiled with future division enabled; bits "0x10" and ' - '"0x1000"\n' - 'were used in earlier versions of Python.\n' - '\n' - 'Other bits in "co_flags" are reserved for internal use.\n' - '\n' - 'If a code object represents a function and has a docstring, the ' - 'first\n' - 'item in "co_consts" is the docstring of the function.\n' - '\n' - '\n' - 'Methods on code objects\n' - '~~~~~~~~~~~~~~~~~~~~~~~\n' - '\n' - 'codeobject.co_positions()\n' - '\n' - ' Returns an iterable over the source code positions of each\n' - ' *bytecode* instruction in the code object.\n' - '\n' - ' The iterator returns "tuple"s containing the "(start_line,\n' - ' end_line, start_column, end_column)". The *i-th* tuple ' - 'corresponds\n' - ' to the position of the source code that compiled to the *i-th* ' - 'code\n' - ' unit. Column information is 0-indexed utf-8 byte offsets on the\n' - ' given source line.\n' - '\n' - ' This positional information can be missing. A non-exhaustive ' - 'lists\n' - ' of cases where this may happen:\n' - '\n' - ' * Running the interpreter with "-X" "no_debug_ranges".\n' - '\n' - ' * Loading a pyc file compiled while using "-X" ' - '"no_debug_ranges".\n' - '\n' - ' * Position tuples corresponding to artificial instructions.\n' - '\n' - ' * Line and column numbers that can’t be represented due to\n' - ' implementation specific limitations.\n' - '\n' - ' When this occurs, some or all of the tuple elements can be ' - '"None".\n' - '\n' - ' Added in version 3.11.\n' - '\n' - ' Note:\n' - '\n' - ' This feature requires storing column positions in code ' - 'objects\n' - ' which may result in a small increase of disk usage of ' - 'compiled\n' - ' Python files or interpreter memory usage. To avoid storing ' - 'the\n' - ' extra information and/or deactivate printing the extra ' - 'traceback\n' - ' information, the "-X" "no_debug_ranges" command line flag or ' - 'the\n' - ' "PYTHONNODEBUGRANGES" environment variable can be used.\n' - '\n' - 'codeobject.co_lines()\n' - '\n' - ' Returns an iterator that yields information about successive ' - 'ranges\n' - ' of *bytecode*s. Each item yielded is a "(start, end, lineno)"\n' - ' "tuple":\n' - '\n' - ' * "start" (an "int") represents the offset (inclusive) of the ' - 'start\n' - ' of the *bytecode* range\n' - '\n' - ' * "end" (an "int") represents the offset (exclusive) of the end ' - 'of\n' - ' the *bytecode* range\n' - '\n' - ' * "lineno" is an "int" representing the line number of the\n' - ' *bytecode* range, or "None" if the bytecodes in the given ' - 'range\n' - ' have no line number\n' - '\n' - ' The items yielded will have the following properties:\n' - '\n' - ' * The first range yielded will have a "start" of 0.\n' - '\n' - ' * The "(start, end)" ranges will be non-decreasing and ' - 'consecutive.\n' - ' That is, for any pair of "tuple"s, the "start" of the second ' - 'will\n' - ' be equal to the "end" of the first.\n' - '\n' - ' * No range will be backwards: "end >= start" for all triples.\n' - '\n' - ' * The last "tuple" yielded will have "end" equal to the size of ' - 'the\n' - ' *bytecode*.\n' - '\n' - ' Zero-width ranges, where "start == end", are allowed. 
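A small sketch exercising the documented invariants of "co_lines()" (the function is arbitrary; exact offsets vary between CPython versions, so only guaranteed properties are checked):

   >>> def add(a, b):
   ...     return a + b
   ...
   >>> ranges = list(add.__code__.co_lines())
   >>> ranges[0][0]                                   # the first range starts at offset 0
   0
   >>> all(end >= start for start, end, lineno in ranges)
   True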
' - 'Zero-width\n' - ' ranges are used for lines that are present in the source code, ' - 'but\n' - ' have been eliminated by the *bytecode* compiler.\n' - '\n' - ' Added in version 3.10.\n' - '\n' - ' See also:\n' - '\n' - ' **PEP 626** - Precise line numbers for debugging and other ' - 'tools.\n' - ' The PEP that introduced the "co_lines()" method.\n' - '\n' - 'codeobject.replace(**kwargs)\n' - '\n' - ' Return a copy of the code object with new values for the ' - 'specified\n' - ' fields.\n' - '\n' - ' Code objects are also supported by the generic function\n' - ' "copy.replace()".\n' - '\n' - ' Added in version 3.8.\n' - '\n' - '\n' - 'Frame objects\n' - '-------------\n' - '\n' - 'Frame objects represent execution frames. They may occur in ' - 'traceback\n' - 'objects, and are also passed to registered trace functions.\n' - '\n' - '\n' - 'Special read-only attributes\n' - '~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n' - '\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| frame.f_back | Points to ' - 'the previous stack frame (towards the |\n' - '| | caller), or ' - '"None" if this is the bottom stack |\n' - '| | ' - 'frame |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| frame.f_code | The code ' - 'object being executed in this frame. |\n' - '| | Accessing ' - 'this attribute raises an auditing event |\n' - '| | ' - '"object.__getattr__" with arguments "obj" and |\n' - '| | ' - '""f_code"". |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| frame.f_locals | The mapping ' - 'used by the frame to look up local |\n' - '| | variables. ' - 'If the frame refers to an *optimized |\n' - '| | scope*, this ' - 'may return a write-through proxy |\n' - '| | object. ' - 'Changed in version 3.13: Return a proxy |\n' - '| | for ' - 'optimized scopes. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| frame.f_globals | The ' - 'dictionary used by the frame to look up global |\n' - '| | ' - 'variables |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| frame.f_builtins | The ' - 'dictionary used by the frame to look up built- |\n' - '| | in ' - '(intrinsic) names |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| frame.f_lasti | The “precise ' - 'instruction†of the frame object |\n' - '| | (this is an ' - 'index into the *bytecode* string of |\n' - '| | the code ' - 'object) |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '\n' - '\n' - 'Special writable attributes\n' - '~~~~~~~~~~~~~~~~~~~~~~~~~~~\n' - '\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| frame.f_trace | If not ' - '"None", this is a function called for |\n' - '| | various ' - 'events during code execution (this is used |\n' - '| | by ' - 'debuggers). Normally an event is triggered for |\n' - '| | each new ' - 'source line (see "f_trace_lines"). 
|\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| frame.f_trace_lines | Set this ' - 'attribute to "False" to disable |\n' - '| | triggering a ' - 'tracing event for each source line. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| frame.f_trace_opcodes | Set this ' - 'attribute to "True" to allow per-opcode |\n' - '| | events to be ' - 'requested. Note that this may lead to |\n' - '| | undefined ' - 'interpreter behaviour if exceptions |\n' - '| | raised by ' - 'the trace function escape to the |\n' - '| | function ' - 'being traced. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| frame.f_lineno | The current ' - 'line number of the frame – writing to |\n' - '| | this from ' - 'within a trace function jumps to the |\n' - '| | given line ' - '(only for the bottom-most frame). A |\n' - '| | debugger can ' - 'implement a Jump command (aka Set |\n' - '| | Next ' - 'Statement) by writing to this attribute. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '\n' - '\n' - 'Frame object methods\n' - '~~~~~~~~~~~~~~~~~~~~\n' - '\n' - 'Frame objects support one method:\n' - '\n' - 'frame.clear()\n' - '\n' - ' This method clears all references to local variables held by ' - 'the\n' - ' frame. Also, if the frame belonged to a *generator*, the ' - 'generator\n' - ' is finalized. This helps break reference cycles involving ' - 'frame\n' - ' objects (for example when catching an exception and storing its\n' - ' traceback for later use).\n' - '\n' - ' "RuntimeError" is raised if the frame is currently executing or\n' - ' suspended.\n' - '\n' - ' Added in version 3.4.\n' - '\n' - ' Changed in version 3.13: Attempting to clear a suspended frame\n' - ' raises "RuntimeError" (as has always been the case for ' - 'executing\n' - ' frames).\n' - '\n' - '\n' - 'Traceback objects\n' - '-----------------\n' - '\n' - 'Traceback objects represent the stack trace of an exception. A\n' - 'traceback object is implicitly created when an exception occurs, ' - 'and\n' - 'may also be explicitly created by calling "types.TracebackType".\n' - '\n' - 'Changed in version 3.7: Traceback objects can now be explicitly\n' - 'instantiated from Python code.\n' - '\n' - 'For implicitly created tracebacks, when the search for an ' - 'exception\n' - 'handler unwinds the execution stack, at each unwound level a ' - 'traceback\n' - 'object is inserted in front of the current traceback. When an\n' - 'exception handler is entered, the stack trace is made available to ' - 'the\n' - 'program. (See section The try statement.) 
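As a small sketch of reading the frame attributes listed above from the currently running frame (using inspect.currentframe(), which may return None on implementations without Python stack frame support):

    import inspect

    def where_am_i():
        frame = inspect.currentframe()
        # f_code and f_lineno are among the read-only attributes described above
        return frame.f_code.co_name, frame.f_lineno

    print(where_am_i())   # ('where_am_i', <line number currently executing>)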
It is accessible as the\n' - 'third item of the tuple returned by "sys.exc_info()", and as the\n' - '"__traceback__" attribute of the caught exception.\n' - '\n' - 'When the program contains no suitable handler, the stack trace is\n' - 'written (nicely formatted) to the standard error stream; if the\n' - 'interpreter is interactive, it is also made available to the user ' - 'as\n' - '"sys.last_traceback".\n' - '\n' - 'For explicitly created tracebacks, it is up to the creator of the\n' - 'traceback to determine how the "tb_next" attributes should be ' - 'linked\n' - 'to form a full stack trace.\n' - '\n' - 'Special read-only attributes:\n' - '\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| traceback.tb_frame | Points to ' - 'the execution frame of the current |\n' - '| | level. ' - 'Accessing this attribute raises an |\n' - '| | auditing ' - 'event "object.__getattr__" with arguments |\n' - '| | "obj" and ' - '""tb_frame"". |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| traceback.tb_lineno | Gives the ' - 'line number where the exception occurred |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| traceback.tb_lasti | Indicates ' - 'the “precise instructionâ€. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '\n' - 'The line number and last instruction in the traceback may differ ' - 'from\n' - 'the line number of its frame object if the exception occurred in a\n' - '"try" statement with no matching except clause or with a "finally"\n' - 'clause.\n' - '\n' - 'traceback.tb_next\n' - '\n' - ' The special writable attribute "tb_next" is the next level in ' - 'the\n' - ' stack trace (towards the frame where the exception occurred), ' - 'or\n' - ' "None" if there is no next level.\n' - '\n' - ' Changed in version 3.7: This attribute is now writable\n' - '\n' - '\n' - 'Slice objects\n' - '-------------\n' - '\n' - 'Slice objects are used to represent slices for "__getitem__()"\n' - 'methods. They are also created by the built-in "slice()" ' - 'function.\n' - '\n' - 'Special read-only attributes: "start" is the lower bound; "stop" ' - 'is\n' - 'the upper bound; "step" is the step value; each is "None" if ' - 'omitted.\n' - 'These attributes can have any type.\n' - '\n' - 'Slice objects support one method:\n' - '\n' - 'slice.indices(self, length)\n' - '\n' - ' This method takes a single integer argument *length* and ' - 'computes\n' - ' information about the slice that the slice object would describe ' - 'if\n' - ' applied to a sequence of *length* items. It returns a tuple of\n' - ' three integers; respectively these are the *start* and *stop*\n' - ' indices and the *step* or stride length of the slice. Missing ' - 'or\n' - ' out-of-bounds indices are handled in a manner consistent with\n' - ' regular slices.\n' - '\n' - '\n' - 'Static method objects\n' - '---------------------\n' - '\n' - 'Static method objects provide a way of defeating the transformation ' - 'of\n' - 'function objects to method objects described above. A static ' - 'method\n' - 'object is a wrapper around any other object, usually a ' - 'user-defined\n' - 'method object. 
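The slice.indices() behaviour described above can be illustrated with a short example:

    s = slice(2, None, 3)                 # the slice written as seq[2::3]
    print(s.indices(10))                  # (2, 10, 3) for a sequence of length 10
    print(list(range(*s.indices(10))))    # [2, 5, 8]: the indices actually selected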
When a static method object is retrieved from a ' - 'class\n' - 'or a class instance, the object actually returned is the wrapped\n' - 'object, which is not subject to any further transformation. Static\n' - 'method objects are also callable. Static method objects are created ' - 'by\n' - 'the built-in "staticmethod()" constructor.\n' - '\n' - '\n' - 'Class method objects\n' - '--------------------\n' - '\n' - 'A class method object, like a static method object, is a wrapper\n' - 'around another object that alters the way in which that object is\n' - 'retrieved from classes and class instances. The behaviour of class\n' - 'method objects upon such retrieval is described above, under ' - '“instance\n' - 'methodsâ€. Class method objects are created by the built-in\n' - '"classmethod()" constructor.\n', - 'typesfunctions': 'Functions\n' - '*********\n' - '\n' - 'Function objects are created by function definitions. The ' - 'only\n' - 'operation on a function object is to call it: ' - '"func(argument-list)".\n' - '\n' - 'There are really two flavors of function objects: built-in ' - 'functions\n' - 'and user-defined functions. Both support the same ' - 'operation (to call\n' - 'the function), but the implementation is different, hence ' - 'the\n' - 'different object types.\n' - '\n' - 'See Function definitions for more information.\n', - 'typesmapping': 'Mapping Types — "dict"\n' - '**********************\n' - '\n' - 'A *mapping* object maps *hashable* values to arbitrary ' - 'objects.\n' - 'Mappings are mutable objects. There is currently only one ' - 'standard\n' - 'mapping type, the *dictionary*. (For other containers see ' - 'the built-\n' - 'in "list", "set", and "tuple" classes, and the "collections" ' - 'module.)\n' - '\n' - 'A dictionary’s keys are *almost* arbitrary values. Values ' - 'that are\n' - 'not *hashable*, that is, values containing lists, ' - 'dictionaries or\n' - 'other mutable types (that are compared by value rather than ' - 'by object\n' - 'identity) may not be used as keys. Values that compare equal ' - '(such as\n' - '"1", "1.0", and "True") can be used interchangeably to index ' - 'the same\n' - 'dictionary entry.\n' - '\n' - 'class dict(**kwargs)\n' - 'class dict(mapping, **kwargs)\n' - 'class dict(iterable, **kwargs)\n' - '\n' - ' Return a new dictionary initialized from an optional ' - 'positional\n' - ' argument and a possibly empty set of keyword arguments.\n' - '\n' - ' Dictionaries can be created by several means:\n' - '\n' - ' * Use a comma-separated list of "key: value" pairs within ' - 'braces:\n' - ' "{\'jack\': 4098, \'sjoerd\': 4127}" or "{4098: ' - "'jack', 4127:\n" - ' \'sjoerd\'}"\n' - '\n' - ' * Use a dict comprehension: "{}", "{x: x ** 2 for x in ' - 'range(10)}"\n' - '\n' - ' * Use the type constructor: "dict()", "dict([(\'foo\', ' - "100), ('bar',\n" - ' 200)])", "dict(foo=100, bar=200)"\n' - '\n' - ' If no positional argument is given, an empty dictionary ' - 'is created.\n' - ' If a positional argument is given and it defines a ' - '"keys()" method,\n' - ' a dictionary is created by calling "__getitem__()" on the ' - 'argument\n' - ' with each returned key from the method. Otherwise, the ' - 'positional\n' - ' argument must be an *iterable* object. Each item in the ' - 'iterable\n' - ' must itself be an iterable with exactly two elements. ' - 'The first\n' - ' element of each item becomes a key in the new dictionary, ' - 'and the\n' - ' second element the corresponding value. 
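A minimal sketch of the static method and class method objects described above, using the built-in staticmethod() and classmethod() decorators (the class name Greeter is illustrative only):

    class Greeter:
        greeting = "hello"

        @staticmethod
        def shout(text):
            # retrieved from the class or an instance, the wrapped plain function is returned
            return text.upper()

        @classmethod
        def default(cls):
            # the class is passed implicitly as the first argument
            return cls.greeting

    print(Greeter.shout("hi"))    # 'HI'
    print(Greeter().default())    # 'hello'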
If a key occurs ' - 'more than\n' - ' once, the last value for that key becomes the ' - 'corresponding value\n' - ' in the new dictionary.\n' - '\n' - ' If keyword arguments are given, the keyword arguments and ' - 'their\n' - ' values are added to the dictionary created from the ' - 'positional\n' - ' argument. If a key being added is already present, the ' - 'value from\n' - ' the keyword argument replaces the value from the ' - 'positional\n' - ' argument.\n' - '\n' - ' To illustrate, the following examples all return a ' - 'dictionary equal\n' - ' to "{"one": 1, "two": 2, "three": 3}":\n' - '\n' - ' >>> a = dict(one=1, two=2, three=3)\n' - " >>> b = {'one': 1, 'two': 2, 'three': 3}\n" - " >>> c = dict(zip(['one', 'two', 'three'], [1, 2, 3]))\n" - " >>> d = dict([('two', 2), ('one', 1), ('three', 3)])\n" - " >>> e = dict({'three': 3, 'one': 1, 'two': 2})\n" - " >>> f = dict({'one': 1, 'three': 3}, two=2)\n" - ' >>> a == b == c == d == e == f\n' - ' True\n' - '\n' - ' Providing keyword arguments as in the first example only ' - 'works for\n' - ' keys that are valid Python identifiers. Otherwise, any ' - 'valid keys\n' - ' can be used.\n' - '\n' - ' These are the operations that dictionaries support (and ' - 'therefore,\n' - ' custom mapping types should support too):\n' - '\n' - ' list(d)\n' - '\n' - ' Return a list of all the keys used in the dictionary ' - '*d*.\n' - '\n' - ' len(d)\n' - '\n' - ' Return the number of items in the dictionary *d*.\n' - '\n' - ' d[key]\n' - '\n' - ' Return the item of *d* with key *key*. Raises a ' - '"KeyError" if\n' - ' *key* is not in the map.\n' - '\n' - ' If a subclass of dict defines a method "__missing__()" ' - 'and *key*\n' - ' is not present, the "d[key]" operation calls that ' - 'method with\n' - ' the key *key* as argument. The "d[key]" operation ' - 'then returns\n' - ' or raises whatever is returned or raised by the\n' - ' "__missing__(key)" call. No other operations or ' - 'methods invoke\n' - ' "__missing__()". If "__missing__()" is not defined, ' - '"KeyError"\n' - ' is raised. "__missing__()" must be a method; it cannot ' - 'be an\n' - ' instance variable:\n' - '\n' - ' >>> class Counter(dict):\n' - ' ... def __missing__(self, key):\n' - ' ... return 0\n' - ' ...\n' - ' >>> c = Counter()\n' - " >>> c['red']\n" - ' 0\n' - " >>> c['red'] += 1\n" - " >>> c['red']\n" - ' 1\n' - '\n' - ' The example above shows part of the implementation of\n' - ' "collections.Counter". A different "__missing__" ' - 'method is used\n' - ' by "collections.defaultdict".\n' - '\n' - ' d[key] = value\n' - '\n' - ' Set "d[key]" to *value*.\n' - '\n' - ' del d[key]\n' - '\n' - ' Remove "d[key]" from *d*. Raises a "KeyError" if ' - '*key* is not\n' - ' in the map.\n' - '\n' - ' key in d\n' - '\n' - ' Return "True" if *d* has a key *key*, else "False".\n' - '\n' - ' key not in d\n' - '\n' - ' Equivalent to "not key in d".\n' - '\n' - ' iter(d)\n' - '\n' - ' Return an iterator over the keys of the dictionary. ' - 'This is a\n' - ' shortcut for "iter(d.keys())".\n' - '\n' - ' clear()\n' - '\n' - ' Remove all items from the dictionary.\n' - '\n' - ' copy()\n' - '\n' - ' Return a shallow copy of the dictionary.\n' - '\n' - ' classmethod fromkeys(iterable, value=None, /)\n' - '\n' - ' Create a new dictionary with keys from *iterable* and ' - 'values set\n' - ' to *value*.\n' - '\n' - ' "fromkeys()" is a class method that returns a new ' - 'dictionary.\n' - ' *value* defaults to "None". 
All of the values refer ' - 'to just a\n' - ' single instance, so it generally doesn’t make sense ' - 'for *value*\n' - ' to be a mutable object such as an empty list. To get ' - 'distinct\n' - ' values, use a dict comprehension instead.\n' - '\n' - ' get(key, default=None)\n' - '\n' - ' Return the value for *key* if *key* is in the ' - 'dictionary, else\n' - ' *default*. If *default* is not given, it defaults to ' - '"None", so\n' - ' that this method never raises a "KeyError".\n' - '\n' - ' items()\n' - '\n' - ' Return a new view of the dictionary’s items ("(key, ' - 'value)"\n' - ' pairs). See the documentation of view objects.\n' - '\n' - ' keys()\n' - '\n' - ' Return a new view of the dictionary’s keys. See the\n' - ' documentation of view objects.\n' - '\n' - ' pop(key[, default])\n' - '\n' - ' If *key* is in the dictionary, remove it and return ' - 'its value,\n' - ' else return *default*. If *default* is not given and ' - '*key* is\n' - ' not in the dictionary, a "KeyError" is raised.\n' - '\n' - ' popitem()\n' - '\n' - ' Remove and return a "(key, value)" pair from the ' - 'dictionary.\n' - ' Pairs are returned in LIFO (last-in, first-out) ' - 'order.\n' - '\n' - ' "popitem()" is useful to destructively iterate over a\n' - ' dictionary, as often used in set algorithms. If the ' - 'dictionary\n' - ' is empty, calling "popitem()" raises a "KeyError".\n' - '\n' - ' Changed in version 3.7: LIFO order is now guaranteed. ' - 'In prior\n' - ' versions, "popitem()" would return an arbitrary ' - 'key/value pair.\n' - '\n' - ' reversed(d)\n' - '\n' - ' Return a reverse iterator over the keys of the ' - 'dictionary. This\n' - ' is a shortcut for "reversed(d.keys())".\n' - '\n' - ' Added in version 3.8.\n' - '\n' - ' setdefault(key, default=None)\n' - '\n' - ' If *key* is in the dictionary, return its value. If ' - 'not, insert\n' - ' *key* with a value of *default* and return *default*. ' - '*default*\n' - ' defaults to "None".\n' - '\n' - ' update([other])\n' - '\n' - ' Update the dictionary with the key/value pairs from ' - '*other*,\n' - ' overwriting existing keys. Return "None".\n' - '\n' - ' "update()" accepts either another object with a ' - '"keys()" method\n' - ' (in which case "__getitem__()" is called with every ' - 'key returned\n' - ' from the method) or an iterable of key/value pairs (as ' - 'tuples or\n' - ' other iterables of length two). If keyword arguments ' - 'are\n' - ' specified, the dictionary is then updated with those ' - 'key/value\n' - ' pairs: "d.update(red=1, blue=2)".\n' - '\n' - ' values()\n' - '\n' - ' Return a new view of the dictionary’s values. See ' - 'the\n' - ' documentation of view objects.\n' - '\n' - ' An equality comparison between one "dict.values()" ' - 'view and\n' - ' another will always return "False". This also applies ' - 'when\n' - ' comparing "dict.values()" to itself:\n' - '\n' - " >>> d = {'a': 1}\n" - ' >>> d.values() == d.values()\n' - ' False\n' - '\n' - ' d | other\n' - '\n' - ' Create a new dictionary with the merged keys and ' - 'values of *d*\n' - ' and *other*, which must both be dictionaries. The ' - 'values of\n' - ' *other* take priority when *d* and *other* share ' - 'keys.\n' - '\n' - ' Added in version 3.9.\n' - '\n' - ' d |= other\n' - '\n' - ' Update the dictionary *d* with keys and values from ' - '*other*,\n' - ' which may be either a *mapping* or an *iterable* of ' - 'key/value\n' - ' pairs. 
The values of *other* take priority when *d* ' - 'and *other*\n' - ' share keys.\n' - '\n' - ' Added in version 3.9.\n' - '\n' - ' Dictionaries compare equal if and only if they have the ' - 'same "(key,\n' - ' value)" pairs (regardless of ordering). Order comparisons ' - '(‘<’,\n' - ' ‘<=’, ‘>=’, ‘>’) raise "TypeError".\n' - '\n' - ' Dictionaries preserve insertion order. Note that ' - 'updating a key\n' - ' does not affect the order. Keys added after deletion are ' - 'inserted\n' - ' at the end.\n' - '\n' - ' >>> d = {"one": 1, "two": 2, "three": 3, "four": 4}\n' - ' >>> d\n' - " {'one': 1, 'two': 2, 'three': 3, 'four': 4}\n" - ' >>> list(d)\n' - " ['one', 'two', 'three', 'four']\n" - ' >>> list(d.values())\n' - ' [1, 2, 3, 4]\n' - ' >>> d["one"] = 42\n' - ' >>> d\n' - " {'one': 42, 'two': 2, 'three': 3, 'four': 4}\n" - ' >>> del d["two"]\n' - ' >>> d["two"] = None\n' - ' >>> d\n' - " {'one': 42, 'three': 3, 'four': 4, 'two': None}\n" - '\n' - ' Changed in version 3.7: Dictionary order is guaranteed to ' - 'be\n' - ' insertion order. This behavior was an implementation ' - 'detail of\n' - ' CPython from 3.6.\n' - '\n' - ' Dictionaries and dictionary views are reversible.\n' - '\n' - ' >>> d = {"one": 1, "two": 2, "three": 3, "four": 4}\n' - ' >>> d\n' - " {'one': 1, 'two': 2, 'three': 3, 'four': 4}\n" - ' >>> list(reversed(d))\n' - " ['four', 'three', 'two', 'one']\n" - ' >>> list(reversed(d.values()))\n' - ' [4, 3, 2, 1]\n' - ' >>> list(reversed(d.items()))\n' - " [('four', 4), ('three', 3), ('two', 2), ('one', 1)]\n" - '\n' - ' Changed in version 3.8: Dictionaries are now reversible.\n' - '\n' - 'See also:\n' - '\n' - ' "types.MappingProxyType" can be used to create a read-only ' - 'view of a\n' - ' "dict".\n' - '\n' - '\n' - 'Dictionary view objects\n' - '=======================\n' - '\n' - 'The objects returned by "dict.keys()", "dict.values()" and\n' - '"dict.items()" are *view objects*. They provide a dynamic ' - 'view on the\n' - 'dictionary’s entries, which means that when the dictionary ' - 'changes,\n' - 'the view reflects these changes.\n' - '\n' - 'Dictionary views can be iterated over to yield their ' - 'respective data,\n' - 'and support membership tests:\n' - '\n' - 'len(dictview)\n' - '\n' - ' Return the number of entries in the dictionary.\n' - '\n' - 'iter(dictview)\n' - '\n' - ' Return an iterator over the keys, values or items ' - '(represented as\n' - ' tuples of "(key, value)") in the dictionary.\n' - '\n' - ' Keys and values are iterated over in insertion order. ' - 'This allows\n' - ' the creation of "(value, key)" pairs using "zip()": ' - '"pairs =\n' - ' zip(d.values(), d.keys())". Another way to create the ' - 'same list is\n' - ' "pairs = [(v, k) for (k, v) in d.items()]".\n' - '\n' - ' Iterating views while adding or deleting entries in the ' - 'dictionary\n' - ' may raise a "RuntimeError" or fail to iterate over all ' - 'entries.\n' - '\n' - ' Changed in version 3.7: Dictionary order is guaranteed to ' - 'be\n' - ' insertion order.\n' - '\n' - 'x in dictview\n' - '\n' - ' Return "True" if *x* is in the underlying dictionary’s ' - 'keys, values\n' - ' or items (in the latter case, *x* should be a "(key, ' - 'value)"\n' - ' tuple).\n' - '\n' - 'reversed(dictview)\n' - '\n' - ' Return a reverse iterator over the keys, values or items ' - 'of the\n' - ' dictionary. 
The view will be iterated in reverse order of ' - 'the\n' - ' insertion.\n' - '\n' - ' Changed in version 3.8: Dictionary views are now ' - 'reversible.\n' - '\n' - 'dictview.mapping\n' - '\n' - ' Return a "types.MappingProxyType" that wraps the ' - 'original\n' - ' dictionary to which the view refers.\n' - '\n' - ' Added in version 3.10.\n' - '\n' - 'Keys views are set-like since their entries are unique and ' - '*hashable*.\n' - 'Items views also have set-like operations since the (key, ' - 'value) pairs\n' - 'are unique and the keys are hashable. If all values in an ' - 'items view\n' - 'are hashable as well, then the items view can interoperate ' - 'with other\n' - 'sets. (Values views are not treated as set-like since the ' - 'entries are\n' - 'generally not unique.) For set-like views, all of the ' - 'operations\n' - 'defined for the abstract base class "collections.abc.Set" ' - 'are\n' - 'available (for example, "==", "<", or "^"). While using ' - 'set\n' - 'operators, set-like views accept any iterable as the other ' - 'operand,\n' - 'unlike sets which only accept sets as the input.\n' - '\n' - 'An example of dictionary view usage:\n' - '\n' - " >>> dishes = {'eggs': 2, 'sausage': 1, 'bacon': 1, " - "'spam': 500}\n" - ' >>> keys = dishes.keys()\n' - ' >>> values = dishes.values()\n' - '\n' - ' >>> # iteration\n' - ' >>> n = 0\n' - ' >>> for val in values:\n' - ' ... n += val\n' - ' ...\n' - ' >>> print(n)\n' - ' 504\n' - '\n' - ' >>> # keys and values are iterated over in the same order ' - '(insertion order)\n' - ' >>> list(keys)\n' - " ['eggs', 'sausage', 'bacon', 'spam']\n" - ' >>> list(values)\n' - ' [2, 1, 1, 500]\n' - '\n' - ' >>> # view objects are dynamic and reflect dict changes\n' - " >>> del dishes['eggs']\n" - " >>> del dishes['sausage']\n" - ' >>> list(keys)\n' - " ['bacon', 'spam']\n" - '\n' - ' >>> # set operations\n' - " >>> keys & {'eggs', 'bacon', 'salad'}\n" - " {'bacon'}\n" - " >>> keys ^ {'sausage', 'juice'} == {'juice', 'sausage', " - "'bacon', 'spam'}\n" - ' True\n' - " >>> keys | ['juice', 'juice', 'juice'] == {'bacon', " - "'spam', 'juice'}\n" - ' True\n' - '\n' - ' >>> # get back a read-only proxy for the original ' - 'dictionary\n' - ' >>> values.mapping\n' - " mappingproxy({'bacon': 1, 'spam': 500})\n" - " >>> values.mapping['spam']\n" - ' 500\n', - 'typesmethods': 'Methods\n' - '*******\n' - '\n' - 'Methods are functions that are called using the attribute ' - 'notation.\n' - 'There are two flavors: built-in methods (such as "append()" ' - 'on lists)\n' - 'and class instance method. Built-in methods are described ' - 'with the\n' - 'types that support them.\n' - '\n' - 'If you access a method (a function defined in a class ' - 'namespace)\n' - 'through an instance, you get a special object: a *bound ' - 'method* (also\n' - 'called instance method) object. When called, it will add the ' - '"self"\n' - 'argument to the argument list. Bound methods have two ' - 'special read-\n' - 'only attributes: "m.__self__" is the object on which the ' - 'method\n' - 'operates, and "m.__func__" is the function implementing the ' - 'method.\n' - 'Calling "m(arg-1, arg-2, ..., arg-n)" is completely ' - 'equivalent to\n' - 'calling "m.__func__(m.__self__, arg-1, arg-2, ..., arg-n)".\n' - '\n' - 'Like function objects, bound method objects support getting ' - 'arbitrary\n' - 'attributes. However, since method attributes are actually ' - 'stored on\n' - 'the underlying function object ("method.__func__"), setting ' - 'method\n' - 'attributes on bound methods is disallowed. 
Attempting to ' - 'set an\n' - 'attribute on a method results in an "AttributeError" being ' - 'raised. In\n' - 'order to set a method attribute, you need to explicitly set ' - 'it on the\n' - 'underlying function object:\n' - '\n' - ' >>> class C:\n' - ' ... def method(self):\n' - ' ... pass\n' - ' ...\n' - ' >>> c = C()\n' - " >>> c.method.whoami = 'my name is method' # can't set on " - 'the method\n' - ' Traceback (most recent call last):\n' - ' File "", line 1, in \n' - " AttributeError: 'method' object has no attribute " - "'whoami'\n" - " >>> c.method.__func__.whoami = 'my name is method'\n" - ' >>> c.method.whoami\n' - " 'my name is method'\n" - '\n' - 'See Instance methods for more information.\n', - 'typesmodules': 'Modules\n' - '*******\n' - '\n' - 'The only special operation on a module is attribute access: ' - '"m.name",\n' - 'where *m* is a module and *name* accesses a name defined in ' - '*m*’s\n' - 'symbol table. Module attributes can be assigned to. (Note ' - 'that the\n' - '"import" statement is not, strictly speaking, an operation ' - 'on a module\n' - 'object; "import foo" does not require a module object named ' - '*foo* to\n' - 'exist, rather it requires an (external) *definition* for a ' - 'module\n' - 'named *foo* somewhere.)\n' - '\n' - 'A special attribute of every module is "__dict__". This is ' - 'the\n' - 'dictionary containing the module’s symbol table. Modifying ' - 'this\n' - 'dictionary will actually change the module’s symbol table, ' - 'but direct\n' - 'assignment to the "__dict__" attribute is not possible (you ' - 'can write\n' - '"m.__dict__[\'a\'] = 1", which defines "m.a" to be "1", but ' - 'you can’t\n' - 'write "m.__dict__ = {}"). Modifying "__dict__" directly is ' - 'not\n' - 'recommended.\n' - '\n' - 'Modules built into the interpreter are written like this: ' - '"". If loaded from a file, they are ' - 'written as\n' - '"".\n', - 'typesseq': 'Sequence Types — "list", "tuple", "range"\n' - '*****************************************\n' - '\n' - 'There are three basic sequence types: lists, tuples, and range\n' - 'objects. Additional sequence types tailored for processing of ' - 'binary\n' - 'data and text strings are described in dedicated sections.\n' - '\n' - '\n' - 'Common Sequence Operations\n' - '==========================\n' - '\n' - 'The operations in the following table are supported by most ' - 'sequence\n' - 'types, both mutable and immutable. The ' - '"collections.abc.Sequence" ABC\n' - 'is provided to make it easier to correctly implement these ' - 'operations\n' - 'on custom sequence types.\n' - '\n' - 'This table lists the sequence operations sorted in ascending ' - 'priority.\n' - 'In the table, *s* and *t* are sequences of the same type, *n*, ' - '*i*,\n' - '*j* and *k* are integers and *x* is an arbitrary object that ' - 'meets any\n' - 'type and value restrictions imposed by *s*.\n' - '\n' - 'The "in" and "not in" operations have the same priorities as ' - 'the\n' - 'comparison operations. The "+" (concatenation) and "*" ' - '(repetition)\n' - 'operations have the same priority as the corresponding numeric\n' - 'operations. 
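A brief sketch of the module attribute behaviour described above, using a throw-away module created with types.ModuleType (the module name "demo" and its attributes are arbitrary):

    import types

    m = types.ModuleType("demo")
    m.answer = 42                     # module attributes can be assigned to
    m.__dict__["question"] = "6*7"    # modifying __dict__ also changes the symbol table
    print(m.answer, m.question)       # 42 6*7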
[3]\n' - '\n' - '+----------------------------+----------------------------------+------------+\n' - '| Operation | Result ' - '| Notes |\n' - '|============================|==================================|============|\n' - '| "x in s" | "True" if an item of *s* is ' - '| (1) |\n' - '| | equal to *x*, else "False" ' - '| |\n' - '+----------------------------+----------------------------------+------------+\n' - '| "x not in s" | "False" if an item of *s* is ' - '| (1) |\n' - '| | equal to *x*, else "True" ' - '| |\n' - '+----------------------------+----------------------------------+------------+\n' - '| "s + t" | the concatenation of *s* and *t* ' - '| (6)(7) |\n' - '+----------------------------+----------------------------------+------------+\n' - '| "s * n" or "n * s" | equivalent to adding *s* to ' - '| (2)(7) |\n' - '| | itself *n* times ' - '| |\n' - '+----------------------------+----------------------------------+------------+\n' - '| "s[i]" | *i*th item of *s*, origin 0 ' - '| (3) |\n' - '+----------------------------+----------------------------------+------------+\n' - '| "s[i:j]" | slice of *s* from *i* to *j* ' - '| (3)(4) |\n' - '+----------------------------+----------------------------------+------------+\n' - '| "s[i:j:k]" | slice of *s* from *i* to *j* ' - '| (3)(5) |\n' - '| | with step *k* ' - '| |\n' - '+----------------------------+----------------------------------+------------+\n' - '| "len(s)" | length of *s* ' - '| |\n' - '+----------------------------+----------------------------------+------------+\n' - '| "min(s)" | smallest item of *s* ' - '| |\n' - '+----------------------------+----------------------------------+------------+\n' - '| "max(s)" | largest item of *s* ' - '| |\n' - '+----------------------------+----------------------------------+------------+\n' - '| "s.index(x[, i[, j]])" | index of the first occurrence of ' - '| (8) |\n' - '| | *x* in *s* (at or after index ' - '| |\n' - '| | *i* and before index *j*) ' - '| |\n' - '+----------------------------+----------------------------------+------------+\n' - '| "s.count(x)" | total number of occurrences of ' - '| |\n' - '| | *x* in *s* ' - '| |\n' - '+----------------------------+----------------------------------+------------+\n' - '\n' - 'Sequences of the same type also support comparisons. In ' - 'particular,\n' - 'tuples and lists are compared lexicographically by comparing\n' - 'corresponding elements. This means that to compare equal, every\n' - 'element must compare equal and the two sequences must be of the ' - 'same\n' - 'type and have the same length. (For full details see ' - 'Comparisons in\n' - 'the language reference.)\n' - '\n' - 'Forward and reversed iterators over mutable sequences access ' - 'values\n' - 'using an index. That index will continue to march forward (or\n' - 'backward) even if the underlying sequence is mutated. The ' - 'iterator\n' - 'terminates only when an "IndexError" or a "StopIteration" is\n' - 'encountered (or when the index drops below zero).\n' - '\n' - 'Notes:\n' - '\n' - '1. While the "in" and "not in" operations are used only for ' - 'simple\n' - ' containment testing in the general case, some specialised ' - 'sequences\n' - ' (such as "str", "bytes" and "bytearray") also use them for\n' - ' subsequence testing:\n' - '\n' - ' >>> "gg" in "eggs"\n' - ' True\n' - '\n' - '2. Values of *n* less than "0" are treated as "0" (which yields ' - 'an\n' - ' empty sequence of the same type as *s*). 
Note that items in ' - 'the\n' - ' sequence *s* are not copied; they are referenced multiple ' - 'times.\n' - ' This often haunts new Python programmers; consider:\n' - '\n' - ' >>> lists = [[]] * 3\n' - ' >>> lists\n' - ' [[], [], []]\n' - ' >>> lists[0].append(3)\n' - ' >>> lists\n' - ' [[3], [3], [3]]\n' - '\n' - ' What has happened is that "[[]]" is a one-element list ' - 'containing\n' - ' an empty list, so all three elements of "[[]] * 3" are ' - 'references\n' - ' to this single empty list. Modifying any of the elements of\n' - ' "lists" modifies this single list. You can create a list of\n' - ' different lists this way:\n' - '\n' - ' >>> lists = [[] for i in range(3)]\n' - ' >>> lists[0].append(3)\n' - ' >>> lists[1].append(5)\n' - ' >>> lists[2].append(7)\n' - ' >>> lists\n' - ' [[3], [5], [7]]\n' - '\n' - ' Further explanation is available in the FAQ entry How do I ' - 'create a\n' - ' multidimensional list?.\n' - '\n' - '3. If *i* or *j* is negative, the index is relative to the end ' - 'of\n' - ' sequence *s*: "len(s) + i" or "len(s) + j" is substituted. ' - 'But\n' - ' note that "-0" is still "0".\n' - '\n' - '4. The slice of *s* from *i* to *j* is defined as the sequence ' - 'of\n' - ' items with index *k* such that "i <= k < j". If *i* or *j* ' - 'is\n' - ' greater than "len(s)", use "len(s)". If *i* is omitted or ' - '"None",\n' - ' use "0". If *j* is omitted or "None", use "len(s)". If *i* ' - 'is\n' - ' greater than or equal to *j*, the slice is empty.\n' - '\n' - '5. The slice of *s* from *i* to *j* with step *k* is defined as ' - 'the\n' - ' sequence of items with index "x = i + n*k" such that "0 <= n ' - '<\n' - ' (j-i)/k". In other words, the indices are "i", "i+k", ' - '"i+2*k",\n' - ' "i+3*k" and so on, stopping when *j* is reached (but never\n' - ' including *j*). When *k* is positive, *i* and *j* are ' - 'reduced to\n' - ' "len(s)" if they are greater. When *k* is negative, *i* and ' - '*j* are\n' - ' reduced to "len(s) - 1" if they are greater. If *i* or *j* ' - 'are\n' - ' omitted or "None", they become “end†values (which end ' - 'depends on\n' - ' the sign of *k*). Note, *k* cannot be zero. If *k* is ' - '"None", it\n' - ' is treated like "1".\n' - '\n' - '6. Concatenating immutable sequences always results in a new ' - 'object.\n' - ' This means that building up a sequence by repeated ' - 'concatenation\n' - ' will have a quadratic runtime cost in the total sequence ' - 'length.\n' - ' To get a linear runtime cost, you must switch to one of the\n' - ' alternatives below:\n' - '\n' - ' * if concatenating "str" objects, you can build a list and ' - 'use\n' - ' "str.join()" at the end or else write to an "io.StringIO"\n' - ' instance and retrieve its value when complete\n' - '\n' - ' * if concatenating "bytes" objects, you can similarly use\n' - ' "bytes.join()" or "io.BytesIO", or you can do in-place\n' - ' concatenation with a "bytearray" object. "bytearray" ' - 'objects are\n' - ' mutable and have an efficient overallocation mechanism\n' - '\n' - ' * if concatenating "tuple" objects, extend a "list" instead\n' - '\n' - ' * for other types, investigate the relevant class ' - 'documentation\n' - '\n' - '7. Some sequence types (such as "range") only support item ' - 'sequences\n' - ' that follow specific patterns, and hence don’t support ' - 'sequence\n' - ' concatenation or repetition.\n' - '\n' - '8. "index" raises "ValueError" when *x* is not found in *s*. Not ' - 'all\n' - ' implementations support passing the additional arguments *i* ' - 'and\n' - ' *j*. 
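The slicing rules sketched in the notes above (negative indices, clipping to the sequence length, and extended slices with a step) can be checked with a short example:

    s = list(range(10))
    print(s[1:8:2])    # [1, 3, 5, 7]: indices 1, 3, 5, 7 (j=8 is never included)
    print(s[-3:])      # [7, 8, 9]: a negative index counts from the end
    print(s[::-1])     # [9, 8, ..., 0]: omitted bounds become "end" values for step -1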
These arguments allow efficient searching of subsections ' - 'of\n' - ' the sequence. Passing the extra arguments is roughly ' - 'equivalent to\n' - ' using "s[i:j].index(x)", only without copying any data and ' - 'with the\n' - ' returned index being relative to the start of the sequence ' - 'rather\n' - ' than the start of the slice.\n' - '\n' - '\n' - 'Immutable Sequence Types\n' - '========================\n' - '\n' - 'The only operation that immutable sequence types generally ' - 'implement\n' - 'that is not also implemented by mutable sequence types is ' - 'support for\n' - 'the "hash()" built-in.\n' - '\n' - 'This support allows immutable sequences, such as "tuple" ' - 'instances, to\n' - 'be used as "dict" keys and stored in "set" and "frozenset" ' - 'instances.\n' - '\n' - 'Attempting to hash an immutable sequence that contains ' - 'unhashable\n' - 'values will result in "TypeError".\n' - '\n' - '\n' - 'Mutable Sequence Types\n' - '======================\n' - '\n' - 'The operations in the following table are defined on mutable ' - 'sequence\n' - 'types. The "collections.abc.MutableSequence" ABC is provided to ' - 'make\n' - 'it easier to correctly implement these operations on custom ' - 'sequence\n' - 'types.\n' - '\n' - 'In the table *s* is an instance of a mutable sequence type, *t* ' - 'is any\n' - 'iterable object and *x* is an arbitrary object that meets any ' - 'type and\n' - 'value restrictions imposed by *s* (for example, "bytearray" ' - 'only\n' - 'accepts integers that meet the value restriction "0 <= x <= ' - '255").\n' - '\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| Operation | ' - 'Result | Notes |\n' - '|================================|==================================|=======================|\n' - '| "s[i] = x" | item *i* of *s* is replaced ' - 'by | |\n' - '| | ' - '*x* | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s[i:j] = t" | slice of *s* from *i* to *j* ' - 'is | |\n' - '| | replaced by the contents of ' - 'the | |\n' - '| | iterable ' - '*t* | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "del s[i:j]" | same as "s[i:j] = ' - '[]" | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s[i:j:k] = t" | the elements of "s[i:j:k]" ' - 'are | (1) |\n' - '| | replaced by those of ' - '*t* | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "del s[i:j:k]" | removes the elements ' - 'of | |\n' - '| | "s[i:j:k]" from the ' - 'list | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s.append(x)" | appends *x* to the end of ' - 'the | |\n' - '| | sequence (same ' - 'as | |\n' - '| | "s[len(s):len(s)] = ' - '[x]") | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s.clear()" | removes all items from *s* ' - '(same | (5) |\n' - '| | as "del ' - 's[:]") | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s.copy()" | creates a shallow copy of ' - '*s* | (5) |\n' - '| | (same as ' - '"s[:]") | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s.extend(t)" or "s += t" | extends *s* with the contents ' - 'of | |\n' - '| | *t* 
(for the most part the ' - 'same | |\n' - '| | as "s[len(s):len(s)] = ' - 't") | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s *= n" | updates *s* with its ' - 'contents | (6) |\n' - '| | repeated *n* ' - 'times | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s.insert(i, x)" | inserts *x* into *s* at ' - 'the | |\n' - '| | index given by *i* (same ' - 'as | |\n' - '| | "s[i:i] = ' - '[x]") | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s.pop()" or "s.pop(i)" | retrieves the item at *i* ' - 'and | (2) |\n' - '| | also removes it from ' - '*s* | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s.remove(x)" | removes the first item from ' - '*s* | (3) |\n' - '| | where "s[i]" is equal to ' - '*x* | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s.reverse()" | reverses the items of *s* ' - 'in | (4) |\n' - '| | ' - 'place | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '\n' - 'Notes:\n' - '\n' - '1. If *k* is not equal to "1", *t* must have the same length as ' - 'the\n' - ' slice it is replacing.\n' - '\n' - '2. The optional argument *i* defaults to "-1", so that by ' - 'default the\n' - ' last item is removed and returned.\n' - '\n' - '3. "remove()" raises "ValueError" when *x* is not found in *s*.\n' - '\n' - '4. The "reverse()" method modifies the sequence in place for ' - 'economy\n' - ' of space when reversing a large sequence. To remind users ' - 'that it\n' - ' operates by side effect, it does not return the reversed ' - 'sequence.\n' - '\n' - '5. "clear()" and "copy()" are included for consistency with the\n' - ' interfaces of mutable containers that don’t support slicing\n' - ' operations (such as "dict" and "set"). "copy()" is not part ' - 'of the\n' - ' "collections.abc.MutableSequence" ABC, but most concrete ' - 'mutable\n' - ' sequence classes provide it.\n' - '\n' - ' Added in version 3.3: "clear()" and "copy()" methods.\n' - '\n' - '6. The value *n* is an integer, or an object implementing\n' - ' "__index__()". Zero and negative values of *n* clear the ' - 'sequence.\n' - ' Items in the sequence are not copied; they are referenced ' - 'multiple\n' - ' times, as explained for "s * n" under Common Sequence ' - 'Operations.\n' - '\n' - '\n' - 'Lists\n' - '=====\n' - '\n' - 'Lists are mutable sequences, typically used to store collections ' - 'of\n' - 'homogeneous items (where the precise degree of similarity will ' - 'vary by\n' - 'application).\n' - '\n' - 'class list([iterable])\n' - '\n' - ' Lists may be constructed in several ways:\n' - '\n' - ' * Using a pair of square brackets to denote the empty list: ' - '"[]"\n' - '\n' - ' * Using square brackets, separating items with commas: "[a]", ' - '"[a,\n' - ' b, c]"\n' - '\n' - ' * Using a list comprehension: "[x for x in iterable]"\n' - '\n' - ' * Using the type constructor: "list()" or "list(iterable)"\n' - '\n' - ' The constructor builds a list whose items are the same and in ' - 'the\n' - ' same order as *iterable*’s items. *iterable* may be either ' - 'a\n' - ' sequence, a container that supports iteration, or an ' - 'iterator\n' - ' object. If *iterable* is already a list, a copy is made and\n' - ' returned, similar to "iterable[:]". 
For example, ' - '"list(\'abc\')"\n' - ' returns "[\'a\', \'b\', \'c\']" and "list( (1, 2, 3) )" ' - 'returns "[1, 2,\n' - ' 3]". If no argument is given, the constructor creates a new ' - 'empty\n' - ' list, "[]".\n' - '\n' - ' Many other operations also produce lists, including the ' - '"sorted()"\n' - ' built-in.\n' - '\n' - ' Lists implement all of the common and mutable sequence ' - 'operations.\n' - ' Lists also provide the following additional method:\n' - '\n' - ' sort(*, key=None, reverse=False)\n' - '\n' - ' This method sorts the list in place, using only "<" ' - 'comparisons\n' - ' between items. Exceptions are not suppressed - if any ' - 'comparison\n' - ' operations fail, the entire sort operation will fail (and ' - 'the\n' - ' list will likely be left in a partially modified state).\n' - '\n' - ' "sort()" accepts two arguments that can only be passed by\n' - ' keyword (keyword-only arguments):\n' - '\n' - ' *key* specifies a function of one argument that is used ' - 'to\n' - ' extract a comparison key from each list element (for ' - 'example,\n' - ' "key=str.lower"). The key corresponding to each item in ' - 'the list\n' - ' is calculated once and then used for the entire sorting ' - 'process.\n' - ' The default value of "None" means that list items are ' - 'sorted\n' - ' directly without calculating a separate key value.\n' - '\n' - ' The "functools.cmp_to_key()" utility is available to ' - 'convert a\n' - ' 2.x style *cmp* function to a *key* function.\n' - '\n' - ' *reverse* is a boolean value. If set to "True", then the ' - 'list\n' - ' elements are sorted as if each comparison were reversed.\n' - '\n' - ' This method modifies the sequence in place for economy of ' - 'space\n' - ' when sorting a large sequence. To remind users that it ' - 'operates\n' - ' by side effect, it does not return the sorted sequence ' - '(use\n' - ' "sorted()" to explicitly request a new sorted list ' - 'instance).\n' - '\n' - ' The "sort()" method is guaranteed to be stable. A sort ' - 'is\n' - ' stable if it guarantees not to change the relative order ' - 'of\n' - ' elements that compare equal — this is helpful for sorting ' - 'in\n' - ' multiple passes (for example, sort by department, then by ' - 'salary\n' - ' grade).\n' - '\n' - ' For sorting examples and a brief sorting tutorial, see ' - 'Sorting\n' - ' Techniques.\n' - '\n' - ' **CPython implementation detail:** While a list is being ' - 'sorted,\n' - ' the effect of attempting to mutate, or even inspect, the ' - 'list is\n' - ' undefined. The C implementation of Python makes the list ' - 'appear\n' - ' empty for the duration, and raises "ValueError" if it can ' - 'detect\n' - ' that the list has been mutated during a sort.\n' - '\n' - '\n' - 'Tuples\n' - '======\n' - '\n' - 'Tuples are immutable sequences, typically used to store ' - 'collections of\n' - 'heterogeneous data (such as the 2-tuples produced by the ' - '"enumerate()"\n' - 'built-in). 
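A small example of the keyword-only key and reverse arguments to sort() described above (the sample records are made up for illustration):

    records = [("sales", 50_000), ("it", 70_000), ("support", 45_000)]
    records.sort(key=lambda rec: rec[1], reverse=True)   # highest salary first
    print(records)   # [('it', 70000), ('sales', 50000), ('support', 45000)]
    # sort() returns None; use sorted(records) to get a new sorted list instead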
Tuples are also used for cases where an immutable ' - 'sequence\n' - 'of homogeneous data is needed (such as allowing storage in a ' - '"set" or\n' - '"dict" instance).\n' - '\n' - 'class tuple([iterable])\n' - '\n' - ' Tuples may be constructed in a number of ways:\n' - '\n' - ' * Using a pair of parentheses to denote the empty tuple: ' - '"()"\n' - '\n' - ' * Using a trailing comma for a singleton tuple: "a," or ' - '"(a,)"\n' - '\n' - ' * Separating items with commas: "a, b, c" or "(a, b, c)"\n' - '\n' - ' * Using the "tuple()" built-in: "tuple()" or ' - '"tuple(iterable)"\n' - '\n' - ' The constructor builds a tuple whose items are the same and ' - 'in the\n' - ' same order as *iterable*’s items. *iterable* may be either ' - 'a\n' - ' sequence, a container that supports iteration, or an ' - 'iterator\n' - ' object. If *iterable* is already a tuple, it is returned\n' - ' unchanged. For example, "tuple(\'abc\')" returns "(\'a\', ' - '\'b\', \'c\')"\n' - ' and "tuple( [1, 2, 3] )" returns "(1, 2, 3)". If no argument ' - 'is\n' - ' given, the constructor creates a new empty tuple, "()".\n' - '\n' - ' Note that it is actually the comma which makes a tuple, not ' - 'the\n' - ' parentheses. The parentheses are optional, except in the ' - 'empty\n' - ' tuple case, or when they are needed to avoid syntactic ' - 'ambiguity.\n' - ' For example, "f(a, b, c)" is a function call with three ' - 'arguments,\n' - ' while "f((a, b, c))" is a function call with a 3-tuple as the ' - 'sole\n' - ' argument.\n' - '\n' - ' Tuples implement all of the common sequence operations.\n' - '\n' - 'For heterogeneous collections of data where access by name is ' - 'clearer\n' - 'than access by index, "collections.namedtuple()" may be a more\n' - 'appropriate choice than a simple tuple object.\n' - '\n' - '\n' - 'Ranges\n' - '======\n' - '\n' - 'The "range" type represents an immutable sequence of numbers and ' - 'is\n' - 'commonly used for looping a specific number of times in "for" ' - 'loops.\n' - '\n' - 'class range(stop)\n' - 'class range(start, stop[, step])\n' - '\n' - ' The arguments to the range constructor must be integers ' - '(either\n' - ' built-in "int" or any object that implements the ' - '"__index__()"\n' - ' special method). If the *step* argument is omitted, it ' - 'defaults to\n' - ' "1". If the *start* argument is omitted, it defaults to "0". ' - 'If\n' - ' *step* is zero, "ValueError" is raised.\n' - '\n' - ' For a positive *step*, the contents of a range "r" are ' - 'determined\n' - ' by the formula "r[i] = start + step*i" where "i >= 0" and ' - '"r[i] <\n' - ' stop".\n' - '\n' - ' For a negative *step*, the contents of the range are still\n' - ' determined by the formula "r[i] = start + step*i", but the\n' - ' constraints are "i >= 0" and "r[i] > stop".\n' - '\n' - ' A range object will be empty if "r[0]" does not meet the ' - 'value\n' - ' constraint. 
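To illustrate the point above that it is the comma, not the parentheses, which makes a tuple:

    print(type((1)))     # <class 'int'>: parentheses alone are just grouping
    print(type((1,)))    # <class 'tuple'>: the trailing comma creates the tuple
    print(tuple("abc"))  # ('a', 'b', 'c')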
Ranges do support negative indices, but these ' - 'are\n' - ' interpreted as indexing from the end of the sequence ' - 'determined by\n' - ' the positive indices.\n' - '\n' - ' Ranges containing absolute values larger than "sys.maxsize" ' - 'are\n' - ' permitted but some features (such as "len()") may raise\n' - ' "OverflowError".\n' - '\n' - ' Range examples:\n' - '\n' - ' >>> list(range(10))\n' - ' [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]\n' - ' >>> list(range(1, 11))\n' - ' [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]\n' - ' >>> list(range(0, 30, 5))\n' - ' [0, 5, 10, 15, 20, 25]\n' - ' >>> list(range(0, 10, 3))\n' - ' [0, 3, 6, 9]\n' - ' >>> list(range(0, -10, -1))\n' - ' [0, -1, -2, -3, -4, -5, -6, -7, -8, -9]\n' - ' >>> list(range(0))\n' - ' []\n' - ' >>> list(range(1, 0))\n' - ' []\n' - '\n' - ' Ranges implement all of the common sequence operations ' - 'except\n' - ' concatenation and repetition (due to the fact that range ' - 'objects\n' - ' can only represent sequences that follow a strict pattern ' - 'and\n' - ' repetition and concatenation will usually violate that ' - 'pattern).\n' - '\n' - ' start\n' - '\n' - ' The value of the *start* parameter (or "0" if the ' - 'parameter was\n' - ' not supplied)\n' - '\n' - ' stop\n' - '\n' - ' The value of the *stop* parameter\n' - '\n' - ' step\n' - '\n' - ' The value of the *step* parameter (or "1" if the parameter ' - 'was\n' - ' not supplied)\n' - '\n' - 'The advantage of the "range" type over a regular "list" or ' - '"tuple" is\n' - 'that a "range" object will always take the same (small) amount ' - 'of\n' - 'memory, no matter the size of the range it represents (as it ' - 'only\n' - 'stores the "start", "stop" and "step" values, calculating ' - 'individual\n' - 'items and subranges as needed).\n' - '\n' - 'Range objects implement the "collections.abc.Sequence" ABC, and\n' - 'provide features such as containment tests, element index ' - 'lookup,\n' - 'slicing and support for negative indices (see Sequence Types — ' - 'list,\n' - 'tuple, range):\n' - '\n' - '>>> r = range(0, 20, 2)\n' - '>>> r\n' - 'range(0, 20, 2)\n' - '>>> 11 in r\n' - 'False\n' - '>>> 10 in r\n' - 'True\n' - '>>> r.index(10)\n' - '5\n' - '>>> r[5]\n' - '10\n' - '>>> r[:5]\n' - 'range(0, 10, 2)\n' - '>>> r[-1]\n' - '18\n' - '\n' - 'Testing range objects for equality with "==" and "!=" compares ' - 'them as\n' - 'sequences. That is, two range objects are considered equal if ' - 'they\n' - 'represent the same sequence of values. (Note that two range ' - 'objects\n' - 'that compare equal might have different "start", "stop" and ' - '"step"\n' - 'attributes, for example "range(0) == range(2, 1, 3)" or ' - '"range(0, 3,\n' - '2) == range(0, 4, 2)".)\n' - '\n' - 'Changed in version 3.2: Implement the Sequence ABC. Support ' - 'slicing\n' - 'and negative indices. Test "int" objects for membership in ' - 'constant\n' - 'time instead of iterating through all items.\n' - '\n' - 'Changed in version 3.3: Define ‘==’ and ‘!=’ to compare range ' - 'objects\n' - 'based on the sequence of values they define (instead of ' - 'comparing\n' - 'based on object identity).Added the "start", "stop" and "step"\n' - 'attributes.\n' - '\n' - 'See also:\n' - '\n' - ' * The linspace recipe shows how to implement a lazy version of ' - 'range\n' - ' suitable for floating-point applications.\n', - 'typesseq-mutable': 'Mutable Sequence Types\n' - '**********************\n' - '\n' - 'The operations in the following table are defined on ' - 'mutable sequence\n' - 'types. 
The "collections.abc.MutableSequence" ABC is ' - 'provided to make\n' - 'it easier to correctly implement these operations on ' - 'custom sequence\n' - 'types.\n' - '\n' - 'In the table *s* is an instance of a mutable sequence ' - 'type, *t* is any\n' - 'iterable object and *x* is an arbitrary object that ' - 'meets any type and\n' - 'value restrictions imposed by *s* (for example, ' - '"bytearray" only\n' - 'accepts integers that meet the value restriction "0 <= x ' - '<= 255").\n' - '\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| Operation | ' - 'Result | Notes ' - '|\n' - '|================================|==================================|=======================|\n' - '| "s[i] = x" | item *i* of *s* is ' - 'replaced by | |\n' - '| | ' - '*x* | ' - '|\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s[i:j] = t" | slice of *s* from *i* ' - 'to *j* is | |\n' - '| | replaced by the ' - 'contents of the | |\n' - '| | iterable ' - '*t* | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "del s[i:j]" | same as "s[i:j] = ' - '[]" | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s[i:j:k] = t" | the elements of ' - '"s[i:j:k]" are | (1) |\n' - '| | replaced by those of ' - '*t* | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "del s[i:j:k]" | removes the elements ' - 'of | |\n' - '| | "s[i:j:k]" from the ' - 'list | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s.append(x)" | appends *x* to the ' - 'end of the | |\n' - '| | sequence (same ' - 'as | |\n' - '| | "s[len(s):len(s)] = ' - '[x]") | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s.clear()" | removes all items ' - 'from *s* (same | (5) |\n' - '| | as "del ' - 's[:]") | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s.copy()" | creates a shallow ' - 'copy of *s* | (5) |\n' - '| | (same as ' - '"s[:]") | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s.extend(t)" or "s += t" | extends *s* with the ' - 'contents of | |\n' - '| | *t* (for the most ' - 'part the same | |\n' - '| | as "s[len(s):len(s)] ' - '= t") | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s *= n" | updates *s* with its ' - 'contents | (6) |\n' - '| | repeated *n* ' - 'times | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s.insert(i, x)" | inserts *x* into *s* ' - 'at the | |\n' - '| | index given by *i* ' - '(same as | |\n' - '| | "s[i:i] = ' - '[x]") | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s.pop()" or "s.pop(i)" | retrieves the item at ' - '*i* and | (2) |\n' - '| | also removes it from ' - '*s* | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s.remove(x)" | removes the first ' - 'item from *s* | (3) |\n' - '| | where "s[i]" is equal ' - 'to *x* | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - 
'| "s.reverse()" | reverses the items of ' - '*s* in | (4) |\n' - '| | ' - 'place | ' - '|\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '\n' - 'Notes:\n' - '\n' - '1. If *k* is not equal to "1", *t* must have the same ' - 'length as the\n' - ' slice it is replacing.\n' - '\n' - '2. The optional argument *i* defaults to "-1", so that ' - 'by default the\n' - ' last item is removed and returned.\n' - '\n' - '3. "remove()" raises "ValueError" when *x* is not found ' - 'in *s*.\n' - '\n' - '4. The "reverse()" method modifies the sequence in place ' - 'for economy\n' - ' of space when reversing a large sequence. To remind ' - 'users that it\n' - ' operates by side effect, it does not return the ' - 'reversed sequence.\n' - '\n' - '5. "clear()" and "copy()" are included for consistency ' - 'with the\n' - ' interfaces of mutable containers that don’t support ' - 'slicing\n' - ' operations (such as "dict" and "set"). "copy()" is ' - 'not part of the\n' - ' "collections.abc.MutableSequence" ABC, but most ' - 'concrete mutable\n' - ' sequence classes provide it.\n' - '\n' - ' Added in version 3.3: "clear()" and "copy()" ' - 'methods.\n' - '\n' - '6. The value *n* is an integer, or an object ' - 'implementing\n' - ' "__index__()". Zero and negative values of *n* clear ' - 'the sequence.\n' - ' Items in the sequence are not copied; they are ' - 'referenced multiple\n' - ' times, as explained for "s * n" under Common Sequence ' - 'Operations.\n', - 'unary': 'Unary arithmetic and bitwise operations\n' - '***************************************\n' - '\n' - 'All unary arithmetic and bitwise operations have the same ' - 'priority:\n' - '\n' - ' u_expr ::= power | "-" u_expr | "+" u_expr | "~" u_expr\n' - '\n' - 'The unary "-" (minus) operator yields the negation of its numeric\n' - 'argument; the operation can be overridden with the "__neg__()" ' - 'special\n' - 'method.\n' - '\n' - 'The unary "+" (plus) operator yields its numeric argument ' - 'unchanged;\n' - 'the operation can be overridden with the "__pos__()" special ' - 'method.\n' - '\n' - 'The unary "~" (invert) operator yields the bitwise inversion of ' - 'its\n' - 'integer argument. The bitwise inversion of "x" is defined as\n' - '"-(x+1)". It only applies to integral numbers or to custom ' - 'objects\n' - 'that override the "__invert__()" special method.\n' - '\n' - 'In all three cases, if the argument does not have the proper type, ' - 'a\n' - '"TypeError" exception is raised.\n', - 'while': 'The "while" statement\n' - '*********************\n' - '\n' - 'The "while" statement is used for repeated execution as long as an\n' - 'expression is true:\n' - '\n' - ' while_stmt ::= "while" assignment_expression ":" suite\n' - ' ["else" ":" suite]\n' - '\n' - 'This repeatedly tests the expression and, if it is true, executes ' - 'the\n' - 'first suite; if the expression is false (which may be the first ' - 'time\n' - 'it is tested) the suite of the "else" clause, if present, is ' - 'executed\n' - 'and the loop terminates.\n' - '\n' - 'A "break" statement executed in the first suite terminates the ' - 'loop\n' - 'without executing the "else" clause’s suite. 
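The definition of bitwise inversion given above, that "~x" equals "-(x+1)" for integers, can be verified directly:

    for x in (0, 1, 5, -3):
        assert ~x == -(x + 1)    # bitwise inversion as defined above
    print(~5)                    # -6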
A "continue" ' - 'statement\n' - 'executed in the first suite skips the rest of the suite and goes ' - 'back\n' - 'to testing the expression.\n', - 'with': 'The "with" statement\n' - '********************\n' - '\n' - 'The "with" statement is used to wrap the execution of a block with\n' - 'methods defined by a context manager (see section With Statement\n' - 'Context Managers). This allows common "try"…"except"…"finally" ' - 'usage\n' - 'patterns to be encapsulated for convenient reuse.\n' - '\n' - ' with_stmt ::= "with" ( "(" with_stmt_contents ","? ")" | ' - 'with_stmt_contents ) ":" suite\n' - ' with_stmt_contents ::= with_item ("," with_item)*\n' - ' with_item ::= expression ["as" target]\n' - '\n' - 'The execution of the "with" statement with one “item†proceeds as\n' - 'follows:\n' - '\n' - '1. The context expression (the expression given in the "with_item") ' - 'is\n' - ' evaluated to obtain a context manager.\n' - '\n' - '2. The context manager’s "__enter__()" is loaded for later use.\n' - '\n' - '3. The context manager’s "__exit__()" is loaded for later use.\n' - '\n' - '4. The context manager’s "__enter__()" method is invoked.\n' - '\n' - '5. If a target was included in the "with" statement, the return ' - 'value\n' - ' from "__enter__()" is assigned to it.\n' - '\n' - ' Note:\n' - '\n' - ' The "with" statement guarantees that if the "__enter__()" ' - 'method\n' - ' returns without an error, then "__exit__()" will always be\n' - ' called. Thus, if an error occurs during the assignment to the\n' - ' target list, it will be treated the same as an error occurring\n' - ' within the suite would be. See step 7 below.\n' - '\n' - '6. The suite is executed.\n' - '\n' - '7. The context manager’s "__exit__()" method is invoked. If an\n' - ' exception caused the suite to be exited, its type, value, and\n' - ' traceback are passed as arguments to "__exit__()". Otherwise, ' - 'three\n' - ' "None" arguments are supplied.\n' - '\n' - ' If the suite was exited due to an exception, and the return ' - 'value\n' - ' from the "__exit__()" method was false, the exception is ' - 'reraised.\n' - ' If the return value was true, the exception is suppressed, and\n' - ' execution continues with the statement following the "with"\n' - ' statement.\n' - '\n' - ' If the suite was exited for any reason other than an exception, ' - 'the\n' - ' return value from "__exit__()" is ignored, and execution ' - 'proceeds\n' - ' at the normal location for the kind of exit that was taken.\n' - '\n' - 'The following code:\n' - '\n' - ' with EXPRESSION as TARGET:\n' - ' SUITE\n' - '\n' - 'is semantically equivalent to:\n' - '\n' - ' manager = (EXPRESSION)\n' - ' enter = type(manager).__enter__\n' - ' exit = type(manager).__exit__\n' - ' value = enter(manager)\n' - '\n' - ' try:\n' - ' TARGET = value\n' - ' SUITE\n' - ' except:\n' - ' if not exit(manager, *sys.exc_info()):\n' - ' raise\n' - ' else:\n' - ' exit(manager, None, None, None)\n' - '\n' - 'With more than one item, the context managers are processed as if\n' - 'multiple "with" statements were nested:\n' - '\n' - ' with A() as a, B() as b:\n' - ' SUITE\n' - '\n' - 'is semantically equivalent to:\n' - '\n' - ' with A() as a:\n' - ' with B() as b:\n' - ' SUITE\n' - '\n' - 'You can also write multi-item context managers in multiple lines if\n' - 'the items are surrounded by parentheses. 
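A minimal context manager sketch following the __enter__()/__exit__() steps listed above (the class name Managed is illustrative only):

    class Managed:
        def __enter__(self):
            print("enter")
            return self                # bound to the "as" target, if any

        def __exit__(self, exc_type, exc_value, traceback):
            print("exit")
            return False               # a false return value does not suppress exceptions

    with Managed() as m:
        print("body")
    # prints: enter, body, exit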
For example:\n' - '\n' - ' with (\n' - ' A() as a,\n' - ' B() as b,\n' - ' ):\n' - ' SUITE\n' - '\n' - 'Changed in version 3.1: Support for multiple context expressions.\n' - '\n' - 'Changed in version 3.10: Support for using grouping parentheses to\n' - 'break the statement in multiple lines.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 343** - The “with†statement\n' - ' The specification, background, and examples for the Python ' - '"with"\n' - ' statement.\n', - 'yield': 'The "yield" statement\n' - '*********************\n' - '\n' - ' yield_stmt ::= yield_expression\n' - '\n' - 'A "yield" statement is semantically equivalent to a yield ' - 'expression.\n' - 'The "yield" statement can be used to omit the parentheses that ' - 'would\n' - 'otherwise be required in the equivalent yield expression ' - 'statement.\n' - 'For example, the yield statements\n' - '\n' - ' yield \n' - ' yield from \n' - '\n' - 'are equivalent to the yield expression statements\n' - '\n' - ' (yield )\n' - ' (yield from )\n' - '\n' - 'Yield expressions and statements are only used when defining a\n' - '*generator* function, and are only used in the body of the ' - 'generator\n' - 'function. Using "yield" in a function definition is sufficient to\n' - 'cause that definition to create a generator function instead of a\n' - 'normal function.\n' - '\n' - 'For full details of "yield" semantics, refer to the Yield ' - 'expressions\n' - 'section.\n'} + +topics = { + 'assert': r'''The "assert" statement +********************** + +Assert statements are a convenient way to insert debugging assertions +into a program: + + assert_stmt ::= "assert" expression ["," expression] + +The simple form, "assert expression", is equivalent to + + if __debug__: + if not expression: raise AssertionError + +The extended form, "assert expression1, expression2", is equivalent to + + if __debug__: + if not expression1: raise AssertionError(expression2) + +These equivalences assume that "__debug__" and "AssertionError" refer +to the built-in variables with those names. In the current +implementation, the built-in variable "__debug__" is "True" under +normal circumstances, "False" when optimization is requested (command +line option "-O"). The current code generator emits no code for an +"assert" statement when optimization is requested at compile time. +Note that it is unnecessary to include the source code for the +expression that failed in the error message; it will be displayed as +part of the stack trace. + +Assignments to "__debug__" are illegal. The value for the built-in +variable is determined when the interpreter starts. +''', + 'assignment': r'''Assignment statements +********************* + +Assignment statements are used to (re)bind names to values and to +modify attributes or items of mutable objects: + + assignment_stmt ::= (target_list "=")+ (starred_expression | yield_expression) + target_list ::= target ("," target)* [","] + target ::= identifier + | "(" [target_list] ")" + | "[" [target_list] "]" + | attributeref + | subscription + | slicing + | "*" target + +(See section Primaries for the syntax definitions for *attributeref*, +*subscription*, and *slicing*.) + +An assignment statement evaluates the expression list (remember that +this can be a single expression or a comma-separated list, the latter +yielding a tuple) and assigns the single resulting object to each of +the target lists, from left to right. + +Assignment is defined recursively depending on the form of the target +(list). 
When a target is part of a mutable object (an attribute +reference, subscription or slicing), the mutable object must +ultimately perform the assignment and decide about its validity, and +may raise an exception if the assignment is unacceptable. The rules +observed by various types and the exceptions raised are given with the +definition of the object types (see section The standard type +hierarchy). + +Assignment of an object to a target list, optionally enclosed in +parentheses or square brackets, is recursively defined as follows. + +* If the target list is a single target with no trailing comma, + optionally in parentheses, the object is assigned to that target. + +* Else: + + * If the target list contains one target prefixed with an asterisk, + called a “starred†target: The object must be an iterable with at + least as many items as there are targets in the target list, minus + one. The first items of the iterable are assigned, from left to + right, to the targets before the starred target. The final items + of the iterable are assigned to the targets after the starred + target. A list of the remaining items in the iterable is then + assigned to the starred target (the list can be empty). + + * Else: The object must be an iterable with the same number of items + as there are targets in the target list, and the items are + assigned, from left to right, to the corresponding targets. + +Assignment of an object to a single target is recursively defined as +follows. + +* If the target is an identifier (name): + + * If the name does not occur in a "global" or "nonlocal" statement + in the current code block: the name is bound to the object in the + current local namespace. + + * Otherwise: the name is bound to the object in the global namespace + or the outer namespace determined by "nonlocal", respectively. + + The name is rebound if it was already bound. This may cause the + reference count for the object previously bound to the name to reach + zero, causing the object to be deallocated and its destructor (if it + has one) to be called. + +* If the target is an attribute reference: The primary expression in + the reference is evaluated. It should yield an object with + assignable attributes; if this is not the case, "TypeError" is + raised. That object is then asked to assign the assigned object to + the given attribute; if it cannot perform the assignment, it raises + an exception (usually but not necessarily "AttributeError"). + + Note: If the object is a class instance and the attribute reference + occurs on both sides of the assignment operator, the right-hand side + expression, "a.x" can access either an instance attribute or (if no + instance attribute exists) a class attribute. The left-hand side + target "a.x" is always set as an instance attribute, creating it if + necessary. Thus, the two occurrences of "a.x" do not necessarily + refer to the same attribute: if the right-hand side expression + refers to a class attribute, the left-hand side creates a new + instance attribute as the target of the assignment: + + class Cls: + x = 3 # class variable + inst = Cls() + inst.x = inst.x + 1 # writes inst.x as 4 leaving Cls.x as 3 + + This description does not necessarily apply to descriptor + attributes, such as properties created with "property()". + +* If the target is a subscription: The primary expression in the + reference is evaluated. It should yield either a mutable sequence + object (such as a list) or a mapping object (such as a dictionary). 
+ Next, the subscript expression is evaluated. + + If the primary is a mutable sequence object (such as a list), the + subscript must yield an integer. If it is negative, the sequence’s + length is added to it. The resulting value must be a nonnegative + integer less than the sequence’s length, and the sequence is asked + to assign the assigned object to its item with that index. If the + index is out of range, "IndexError" is raised (assignment to a + subscripted sequence cannot add new items to a list). + + If the primary is a mapping object (such as a dictionary), the + subscript must have a type compatible with the mapping’s key type, + and the mapping is then asked to create a key/value pair which maps + the subscript to the assigned object. This can either replace an + existing key/value pair with the same key value, or insert a new + key/value pair (if no key with the same value existed). + + For user-defined objects, the "__setitem__()" method is called with + appropriate arguments. + +* If the target is a slicing: The primary expression in the reference + is evaluated. It should yield a mutable sequence object (such as a + list). The assigned object should be a sequence object of the same + type. Next, the lower and upper bound expressions are evaluated, + insofar they are present; defaults are zero and the sequence’s + length. The bounds should evaluate to integers. If either bound is + negative, the sequence’s length is added to it. The resulting + bounds are clipped to lie between zero and the sequence’s length, + inclusive. Finally, the sequence object is asked to replace the + slice with the items of the assigned sequence. The length of the + slice may be different from the length of the assigned sequence, + thus changing the length of the target sequence, if the target + sequence allows it. + +**CPython implementation detail:** In the current implementation, the +syntax for targets is taken to be the same as for expressions, and +invalid syntax is rejected during the code generation phase, causing +less detailed error messages. + +Although the definition of assignment implies that overlaps between +the left-hand side and the right-hand side are ‘simultaneous’ (for +example "a, b = b, a" swaps two variables), overlaps *within* the +collection of assigned-to variables occur left-to-right, sometimes +resulting in confusion. For instance, the following program prints +"[0, 2]": + + x = [0, 1] + i = 0 + i, x[i] = 1, 2 # i is updated, then x[i] is updated + print(x) + +See also: + + **PEP 3132** - Extended Iterable Unpacking + The specification for the "*target" feature. + + +Augmented assignment statements +=============================== + +Augmented assignment is the combination, in a single statement, of a +binary operation and an assignment statement: + + augmented_assignment_stmt ::= augtarget augop (expression_list | yield_expression) + augtarget ::= identifier | attributeref | subscription | slicing + augop ::= "+=" | "-=" | "*=" | "@=" | "/=" | "//=" | "%=" | "**=" + | ">>=" | "<<=" | "&=" | "^=" | "|=" + +(See section Primaries for the syntax definitions of the last three +symbols.) + +An augmented assignment evaluates the target (which, unlike normal +assignment statements, cannot be an unpacking) and the expression +list, performs the binary operation specific to the type of assignment +on the two operands, and assigns the result to the original target. +The target is only evaluated once. 
+ +An augmented assignment statement like "x += 1" can be rewritten as "x += x + 1" to achieve a similar, but not exactly equal effect. In the +augmented version, "x" is only evaluated once. Also, when possible, +the actual operation is performed *in-place*, meaning that rather than +creating a new object and assigning that to the target, the old object +is modified instead. + +Unlike normal assignments, augmented assignments evaluate the left- +hand side *before* evaluating the right-hand side. For example, "a[i] ++= f(x)" first looks-up "a[i]", then it evaluates "f(x)" and performs +the addition, and lastly, it writes the result back to "a[i]". + +With the exception of assigning to tuples and multiple targets in a +single statement, the assignment done by augmented assignment +statements is handled the same way as normal assignments. Similarly, +with the exception of the possible *in-place* behavior, the binary +operation performed by augmented assignment is the same as the normal +binary operations. + +For targets which are attribute references, the same caveat about +class and instance attributes applies as for regular assignments. + + +Annotated assignment statements +=============================== + +*Annotation* assignment is the combination, in a single statement, of +a variable or attribute annotation and an optional assignment +statement: + + annotated_assignment_stmt ::= augtarget ":" expression + ["=" (starred_expression | yield_expression)] + +The difference from normal Assignment statements is that only a single +target is allowed. + +The assignment target is considered “simple†if it consists of a +single name that is not enclosed in parentheses. For simple assignment +targets, if in class or module scope, the annotations are gathered in +a lazily evaluated annotation scope. The annotations can be evaluated +using the "__annotations__" attribute of a class or module, or using +the facilities in the "annotationlib" module. + +If the assignment target is not simple (an attribute, subscript node, +or parenthesized name), the annotation is never evaluated. + +If a name is annotated in a function scope, then this name is local +for that scope. Annotations are never evaluated and stored in function +scopes. + +If the right hand side is present, an annotated assignment performs +the actual assignment as if there was no annotation present. If the +right hand side is not present for an expression target, then the +interpreter evaluates the target except for the last "__setitem__()" +or "__setattr__()" call. + +See also: + + **PEP 526** - Syntax for Variable Annotations + The proposal that added syntax for annotating the types of + variables (including class variables and instance variables), + instead of expressing them through comments. + + **PEP 484** - Type hints + The proposal that added the "typing" module to provide a standard + syntax for type annotations that can be used in static analysis + tools and IDEs. + +Changed in version 3.8: Now annotated assignments allow the same +expressions in the right hand side as regular assignments. Previously, +some expressions (like un-parenthesized tuple expressions) caused a +syntax error. + +Changed in version 3.14: Annotations are now lazily evaluated in a +separate annotation scope. If the assignment target is not simple, +annotations are never evaluated. 
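As an editorial illustration of the annotated-assignment rules above (the names "Point" and "f" below are invented for the example and are not part of the reference text), a simple target in class scope records its annotation in "__annotations__", while an annotation in function scope is never evaluated:

    class Point:
        x: int = 0       # simple target: annotation recorded, value assigned
        label: "str"     # annotation only; no assignment is performed

    def f():
        n: this_name_is_never_looked_up   # never evaluated, so no NameError
        n = 1
        return n

    print(Point.__annotations__)   # roughly {'x': <class 'int'>, 'label': 'str'}
    print(Point.x, f())            # 0 1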
+''', + 'assignment-expressions': r'''Assignment expressions +********************** + + assignment_expression ::= [identifier ":="] expression + +An assignment expression (sometimes also called a “named expression†+or “walrusâ€) assigns an "expression" to an "identifier", while also +returning the value of the "expression". + +One common use case is when handling matched regular expressions: + + if matching := pattern.search(data): + do_something(matching) + +Or, when processing a file stream in chunks: + + while chunk := file.read(9000): + process(chunk) + +Assignment expressions must be surrounded by parentheses when used as +expression statements and when used as sub-expressions in slicing, +conditional, lambda, keyword-argument, and comprehension-if +expressions and in "assert", "with", and "assignment" statements. In +all other places where they can be used, parentheses are not required, +including in "if" and "while" statements. + +Added in version 3.8: See **PEP 572** for more details about +assignment expressions. +''', + 'async': r'''Coroutines +********** + +Added in version 3.5. + + +Coroutine function definition +============================= + + async_funcdef ::= [decorators] "async" "def" funcname "(" [parameter_list] ")" + ["->" expression] ":" suite + +Execution of Python coroutines can be suspended and resumed at many +points (see *coroutine*). "await" expressions, "async for" and "async +with" can only be used in the body of a coroutine function. + +Functions defined with "async def" syntax are always coroutine +functions, even if they do not contain "await" or "async" keywords. + +It is a "SyntaxError" to use a "yield from" expression inside the body +of a coroutine function. + +An example of a coroutine function: + + async def func(param1, param2): + do_stuff() + await some_coroutine() + +Changed in version 3.7: "await" and "async" are now keywords; +previously they were only treated as such inside the body of a +coroutine function. + + +The "async for" statement +========================= + + async_for_stmt ::= "async" for_stmt + +An *asynchronous iterable* provides an "__aiter__" method that +directly returns an *asynchronous iterator*, which can call +asynchronous code in its "__anext__" method. + +The "async for" statement allows convenient iteration over +asynchronous iterables. + +The following code: + + async for TARGET in ITER: + SUITE + else: + SUITE2 + +Is semantically equivalent to: + + iter = (ITER) + iter = type(iter).__aiter__(iter) + running = True + + while running: + try: + TARGET = await type(iter).__anext__(iter) + except StopAsyncIteration: + running = False + else: + SUITE + else: + SUITE2 + +See also "__aiter__()" and "__anext__()" for details. + +It is a "SyntaxError" to use an "async for" statement outside the body +of a coroutine function. + + +The "async with" statement +========================== + + async_with_stmt ::= "async" with_stmt + +An *asynchronous context manager* is a *context manager* that is able +to suspend execution in its *enter* and *exit* methods. 
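As a minimal editorial sketch of such a manager (the class "AsyncResource" and the use of "asyncio" for scheduling are assumptions of the example, not part of the reference text), a class only needs awaitable "__aenter__()" and "__aexit__()" methods:

    import asyncio

    class AsyncResource:
        async def __aenter__(self):
            await asyncio.sleep(0)      # may suspend while acquiring
            return "resource"

        async def __aexit__(self, exc_type, exc, tb):
            await asyncio.sleep(0)      # may suspend while releasing
            return False                # do not suppress exceptions

    async def main():
        async with AsyncResource() as r:
            print(r)                    # prints "resource"

    asyncio.run(main())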
+ +The following code: + + async with EXPRESSION as TARGET: + SUITE + +is semantically equivalent to: + + manager = (EXPRESSION) + aenter = type(manager).__aenter__ + aexit = type(manager).__aexit__ + value = await aenter(manager) + hit_except = False + + try: + TARGET = value + SUITE + except: + hit_except = True + if not await aexit(manager, *sys.exc_info()): + raise + finally: + if not hit_except: + await aexit(manager, None, None, None) + +See also "__aenter__()" and "__aexit__()" for details. + +It is a "SyntaxError" to use an "async with" statement outside the +body of a coroutine function. + +See also: + + **PEP 492** - Coroutines with async and await syntax + The proposal that made coroutines a proper standalone concept in + Python, and added supporting syntax. +''', + 'atom-identifiers': r'''Identifiers (Names) +******************* + +An identifier occurring as an atom is a name. See section Identifiers +and keywords for lexical definition and section Naming and binding for +documentation of naming and binding. + +When the name is bound to an object, evaluation of the atom yields +that object. When a name is not bound, an attempt to evaluate it +raises a "NameError" exception. + + +Private name mangling +===================== + +When an identifier that textually occurs in a class definition begins +with two or more underscore characters and does not end in two or more +underscores, it is considered a *private name* of that class. + +See also: The class specifications. + +More precisely, private names are transformed to a longer form before +code is generated for them. If the transformed name is longer than +255 characters, implementation-defined truncation may happen. + +The transformation is independent of the syntactical context in which +the identifier is used but only the following private identifiers are +mangled: + +* Any name used as the name of a variable that is assigned or read or + any name of an attribute being accessed. + + The "__name__" attribute of nested functions, classes, and type + aliases is however not mangled. + +* The name of imported modules, e.g., "__spam" in "import __spam". If + the module is part of a package (i.e., its name contains a dot), the + name is *not* mangled, e.g., the "__foo" in "import __foo.bar" is + not mangled. + +* The name of an imported member, e.g., "__f" in "from spam import + __f". + +The transformation rule is defined as follows: + +* The class name, with leading underscores removed and a single + leading underscore inserted, is inserted in front of the identifier, + e.g., the identifier "__spam" occurring in a class named "Foo", + "_Foo" or "__Foo" is transformed to "_Foo__spam". + +* If the class name consists only of underscores, the transformation + is the identity, e.g., the identifier "__spam" occurring in a class + named "_" or "__" is left as is. +''', + 'atom-literals': r'''Literals +******** + +Python supports string and bytes literals and various numeric +literals: + + literal ::= stringliteral | bytesliteral + | integer | floatnumber | imagnumber + +Evaluation of a literal yields an object of the given type (string, +bytes, integer, floating-point number, complex number) with the given +value. The value may be approximated in the case of floating-point +and imaginary (complex) literals. See section Literals for details. + +All literals correspond to immutable data types, and hence the +object’s identity is less important than its value. 
Multiple +evaluations of literals with the same value (either the same +occurrence in the program text or a different occurrence) may obtain +the same object or a different object with the same value. +''', + 'attribute-access': r'''Customizing attribute access +**************************** + +The following methods can be defined to customize the meaning of +attribute access (use of, assignment to, or deletion of "x.name") for +class instances. + +object.__getattr__(self, name) + + Called when the default attribute access fails with an + "AttributeError" (either "__getattribute__()" raises an + "AttributeError" because *name* is not an instance attribute or an + attribute in the class tree for "self"; or "__get__()" of a *name* + property raises "AttributeError"). This method should either + return the (computed) attribute value or raise an "AttributeError" + exception. The "object" class itself does not provide this method. + + Note that if the attribute is found through the normal mechanism, + "__getattr__()" is not called. (This is an intentional asymmetry + between "__getattr__()" and "__setattr__()".) This is done both for + efficiency reasons and because otherwise "__getattr__()" would have + no way to access other attributes of the instance. Note that at + least for instance variables, you can take total control by not + inserting any values in the instance attribute dictionary (but + instead inserting them in another object). See the + "__getattribute__()" method below for a way to actually get total + control over attribute access. + +object.__getattribute__(self, name) + + Called unconditionally to implement attribute accesses for + instances of the class. If the class also defines "__getattr__()", + the latter will not be called unless "__getattribute__()" either + calls it explicitly or raises an "AttributeError". This method + should return the (computed) attribute value or raise an + "AttributeError" exception. In order to avoid infinite recursion in + this method, its implementation should always call the base class + method with the same name to access any attributes it needs, for + example, "object.__getattribute__(self, name)". + + Note: + + This method may still be bypassed when looking up special methods + as the result of implicit invocation via language syntax or + built-in functions. See Special method lookup. + + For certain sensitive attribute accesses, raises an auditing event + "object.__getattr__" with arguments "obj" and "name". + +object.__setattr__(self, name, value) + + Called when an attribute assignment is attempted. This is called + instead of the normal mechanism (i.e. store the value in the + instance dictionary). *name* is the attribute name, *value* is the + value to be assigned to it. + + If "__setattr__()" wants to assign to an instance attribute, it + should call the base class method with the same name, for example, + "object.__setattr__(self, name, value)". + + For certain sensitive attribute assignments, raises an auditing + event "object.__setattr__" with arguments "obj", "name", "value". + +object.__delattr__(self, name) + + Like "__setattr__()" but for attribute deletion instead of + assignment. This should only be implemented if "del obj.name" is + meaningful for the object. + + For certain sensitive attribute deletions, raises an auditing event + "object.__delattr__" with arguments "obj" and "name". + +object.__dir__(self) + + Called when "dir()" is called on the object. An iterable must be + returned. 
"dir()" converts the returned iterable to a list and + sorts it. + + +Customizing module attribute access +=================================== + +Special names "__getattr__" and "__dir__" can be also used to +customize access to module attributes. The "__getattr__" function at +the module level should accept one argument which is the name of an +attribute and return the computed value or raise an "AttributeError". +If an attribute is not found on a module object through the normal +lookup, i.e. "object.__getattribute__()", then "__getattr__" is +searched in the module "__dict__" before raising an "AttributeError". +If found, it is called with the attribute name and the result is +returned. + +The "__dir__" function should accept no arguments, and return an +iterable of strings that represents the names accessible on module. If +present, this function overrides the standard "dir()" search on a +module. + +For a more fine grained customization of the module behavior (setting +attributes, properties, etc.), one can set the "__class__" attribute +of a module object to a subclass of "types.ModuleType". For example: + + import sys + from types import ModuleType + + class VerboseModule(ModuleType): + def __repr__(self): + return f'Verbose {self.__name__}' + + def __setattr__(self, attr, value): + print(f'Setting {attr}...') + super().__setattr__(attr, value) + + sys.modules[__name__].__class__ = VerboseModule + +Note: + + Defining module "__getattr__" and setting module "__class__" only + affect lookups made using the attribute access syntax – directly + accessing the module globals (whether by code within the module, or + via a reference to the module’s globals dictionary) is unaffected. + +Changed in version 3.5: "__class__" module attribute is now writable. + +Added in version 3.7: "__getattr__" and "__dir__" module attributes. + +See also: + + **PEP 562** - Module __getattr__ and __dir__ + Describes the "__getattr__" and "__dir__" functions on modules. + + +Implementing Descriptors +======================== + +The following methods only apply when an instance of the class +containing the method (a so-called *descriptor* class) appears in an +*owner* class (the descriptor must be in either the owner’s class +dictionary or in the class dictionary for one of its parents). In the +examples below, “the attribute†refers to the attribute whose name is +the key of the property in the owner class’ "__dict__". The "object" +class itself does not implement any of these protocols. + +object.__get__(self, instance, owner=None) + + Called to get the attribute of the owner class (class attribute + access) or of an instance of that class (instance attribute + access). The optional *owner* argument is the owner class, while + *instance* is the instance that the attribute was accessed through, + or "None" when the attribute is accessed through the *owner*. + + This method should return the computed attribute value or raise an + "AttributeError" exception. + + **PEP 252** specifies that "__get__()" is callable with one or two + arguments. Python’s own built-in descriptors support this + specification; however, it is likely that some third-party tools + have descriptors that require both arguments. Python’s own + "__getattribute__()" implementation always passes in both arguments + whether they are required or not. + +object.__set__(self, instance, value) + + Called to set the attribute on an instance *instance* of the owner + class to a new value, *value*. 
+ + Note, adding "__set__()" or "__delete__()" changes the kind of + descriptor to a “data descriptorâ€. See Invoking Descriptors for + more details. + +object.__delete__(self, instance) + + Called to delete the attribute on an instance *instance* of the + owner class. + +Instances of descriptors may also have the "__objclass__" attribute +present: + +object.__objclass__ + + The attribute "__objclass__" is interpreted by the "inspect" module + as specifying the class where this object was defined (setting this + appropriately can assist in runtime introspection of dynamic class + attributes). For callables, it may indicate that an instance of the + given type (or a subclass) is expected or required as the first + positional argument (for example, CPython sets this attribute for + unbound methods that are implemented in C). + + +Invoking Descriptors +==================== + +In general, a descriptor is an object attribute with “binding +behaviorâ€, one whose attribute access has been overridden by methods +in the descriptor protocol: "__get__()", "__set__()", and +"__delete__()". If any of those methods are defined for an object, it +is said to be a descriptor. + +The default behavior for attribute access is to get, set, or delete +the attribute from an object’s dictionary. For instance, "a.x" has a +lookup chain starting with "a.__dict__['x']", then +"type(a).__dict__['x']", and continuing through the base classes of +"type(a)" excluding metaclasses. + +However, if the looked-up value is an object defining one of the +descriptor methods, then Python may override the default behavior and +invoke the descriptor method instead. Where this occurs in the +precedence chain depends on which descriptor methods were defined and +how they were called. + +The starting point for descriptor invocation is a binding, "a.x". How +the arguments are assembled depends on "a": + +Direct Call + The simplest and least common call is when user code directly + invokes a descriptor method: "x.__get__(a)". + +Instance Binding + If binding to an object instance, "a.x" is transformed into the + call: "type(a).__dict__['x'].__get__(a, type(a))". + +Class Binding + If binding to a class, "A.x" is transformed into the call: + "A.__dict__['x'].__get__(None, A)". + +Super Binding + A dotted lookup such as "super(A, a).x" searches + "a.__class__.__mro__" for a base class "B" following "A" and then + returns "B.__dict__['x'].__get__(a, A)". If not a descriptor, "x" + is returned unchanged. + +For instance bindings, the precedence of descriptor invocation depends +on which descriptor methods are defined. A descriptor can define any +combination of "__get__()", "__set__()" and "__delete__()". If it +does not define "__get__()", then accessing the attribute will return +the descriptor object itself unless there is a value in the object’s +instance dictionary. If the descriptor defines "__set__()" and/or +"__delete__()", it is a data descriptor; if it defines neither, it is +a non-data descriptor. Normally, data descriptors define both +"__get__()" and "__set__()", while non-data descriptors have just the +"__get__()" method. Data descriptors with "__get__()" and "__set__()" +(and/or "__delete__()") defined always override a redefinition in an +instance dictionary. In contrast, non-data descriptors can be +overridden by instances. + +Python methods (including those decorated with "@staticmethod" and +"@classmethod") are implemented as non-data descriptors. Accordingly, +instances can redefine and override methods. 
This allows individual +instances to acquire behaviors that differ from other instances of the +same class. + +The "property()" function is implemented as a data descriptor. +Accordingly, instances cannot override the behavior of a property. + + +__slots__ +========= + +*__slots__* allow us to explicitly declare data members (like +properties) and deny the creation of "__dict__" and *__weakref__* +(unless explicitly declared in *__slots__* or available in a parent.) + +The space saved over using "__dict__" can be significant. Attribute +lookup speed can be significantly improved as well. + +object.__slots__ + + This class variable can be assigned a string, iterable, or sequence + of strings with variable names used by instances. *__slots__* + reserves space for the declared variables and prevents the + automatic creation of "__dict__" and *__weakref__* for each + instance. + +Notes on using *__slots__*: + +* When inheriting from a class without *__slots__*, the "__dict__" and + *__weakref__* attribute of the instances will always be accessible. + +* Without a "__dict__" variable, instances cannot be assigned new + variables not listed in the *__slots__* definition. Attempts to + assign to an unlisted variable name raises "AttributeError". If + dynamic assignment of new variables is desired, then add + "'__dict__'" to the sequence of strings in the *__slots__* + declaration. + +* Without a *__weakref__* variable for each instance, classes defining + *__slots__* do not support "weak references" to its instances. If + weak reference support is needed, then add "'__weakref__'" to the + sequence of strings in the *__slots__* declaration. + +* *__slots__* are implemented at the class level by creating + descriptors for each variable name. As a result, class attributes + cannot be used to set default values for instance variables defined + by *__slots__*; otherwise, the class attribute would overwrite the + descriptor assignment. + +* The action of a *__slots__* declaration is not limited to the class + where it is defined. *__slots__* declared in parents are available + in child classes. However, instances of a child subclass will get a + "__dict__" and *__weakref__* unless the subclass also defines + *__slots__* (which should only contain names of any *additional* + slots). + +* If a class defines a slot also defined in a base class, the instance + variable defined by the base class slot is inaccessible (except by + retrieving its descriptor directly from the base class). This + renders the meaning of the program undefined. In the future, a + check may be added to prevent this. + +* "TypeError" will be raised if nonempty *__slots__* are defined for a + class derived from a ""variable-length" built-in type" such as + "int", "bytes", and "tuple". + +* Any non-string *iterable* may be assigned to *__slots__*. + +* If a "dictionary" is used to assign *__slots__*, the dictionary keys + will be used as the slot names. The values of the dictionary can be + used to provide per-attribute docstrings that will be recognised by + "inspect.getdoc()" and displayed in the output of "help()". + +* "__class__" assignment works only if both classes have the same + *__slots__*. + +* Multiple inheritance with multiple slotted parent classes can be + used, but only one parent is allowed to have attributes created by + slots (the other bases must have empty slot layouts) - violations + raise "TypeError". + +* If an *iterator* is used for *__slots__* then a *descriptor* is + created for each of the iterator’s values. 
However, the *__slots__* + attribute will be an empty iterator. +''', + 'attribute-references': r'''Attribute references +******************** + +An attribute reference is a primary followed by a period and a name: + + attributeref ::= primary "." identifier + +The primary must evaluate to an object of a type that supports +attribute references, which most objects do. This object is then +asked to produce the attribute whose name is the identifier. The type +and value produced is determined by the object. Multiple evaluations +of the same attribute reference may yield different objects. + +This production can be customized by overriding the +"__getattribute__()" method or the "__getattr__()" method. The +"__getattribute__()" method is called first and either returns a value +or raises "AttributeError" if the attribute is not available. + +If an "AttributeError" is raised and the object has a "__getattr__()" +method, that method is called as a fallback. +''', + 'augassign': r'''Augmented assignment statements +******************************* + +Augmented assignment is the combination, in a single statement, of a +binary operation and an assignment statement: + + augmented_assignment_stmt ::= augtarget augop (expression_list | yield_expression) + augtarget ::= identifier | attributeref | subscription | slicing + augop ::= "+=" | "-=" | "*=" | "@=" | "/=" | "//=" | "%=" | "**=" + | ">>=" | "<<=" | "&=" | "^=" | "|=" + +(See section Primaries for the syntax definitions of the last three +symbols.) + +An augmented assignment evaluates the target (which, unlike normal +assignment statements, cannot be an unpacking) and the expression +list, performs the binary operation specific to the type of assignment +on the two operands, and assigns the result to the original target. +The target is only evaluated once. + +An augmented assignment statement like "x += 1" can be rewritten as "x += x + 1" to achieve a similar, but not exactly equal effect. In the +augmented version, "x" is only evaluated once. Also, when possible, +the actual operation is performed *in-place*, meaning that rather than +creating a new object and assigning that to the target, the old object +is modified instead. + +Unlike normal assignments, augmented assignments evaluate the left- +hand side *before* evaluating the right-hand side. For example, "a[i] ++= f(x)" first looks-up "a[i]", then it evaluates "f(x)" and performs +the addition, and lastly, it writes the result back to "a[i]". + +With the exception of assigning to tuples and multiple targets in a +single statement, the assignment done by augmented assignment +statements is handled the same way as normal assignments. Similarly, +with the exception of the possible *in-place* behavior, the binary +operation performed by augmented assignment is the same as the normal +binary operations. + +For targets which are attribute references, the same caveat about +class and instance attributes applies as for regular assignments. +''', + 'await': r'''Await expression +**************** + +Suspend the execution of *coroutine* on an *awaitable* object. Can +only be used inside a *coroutine function*. + + await_expr ::= "await" primary + +Added in version 3.5. +''', + 'binary': r'''Binary arithmetic operations +**************************** + +The binary arithmetic operations have the conventional priority +levels. Note that some of these operations also apply to certain non- +numeric types. 
Apart from the power operator, there are only two +levels, one for multiplicative operators and one for additive +operators: + + m_expr ::= u_expr | m_expr "*" u_expr | m_expr "@" m_expr | + m_expr "//" u_expr | m_expr "/" u_expr | + m_expr "%" u_expr + a_expr ::= m_expr | a_expr "+" m_expr | a_expr "-" m_expr + +The "*" (multiplication) operator yields the product of its arguments. +The arguments must either both be numbers, or one argument must be an +integer and the other must be a sequence. In the former case, the +numbers are converted to a common real type and then multiplied +together. In the latter case, sequence repetition is performed; a +negative repetition factor yields an empty sequence. + +This operation can be customized using the special "__mul__()" and +"__rmul__()" methods. + +Changed in version 3.14: If only one operand is a complex number, the +other operand is converted to a floating-point number. + +The "@" (at) operator is intended to be used for matrix +multiplication. No builtin Python types implement this operator. + +This operation can be customized using the special "__matmul__()" and +"__rmatmul__()" methods. + +Added in version 3.5. + +The "/" (division) and "//" (floor division) operators yield the +quotient of their arguments. The numeric arguments are first +converted to a common type. Division of integers yields a float, while +floor division of integers results in an integer; the result is that +of mathematical division with the ‘floor’ function applied to the +result. Division by zero raises the "ZeroDivisionError" exception. + +The division operation can be customized using the special +"__truediv__()" and "__rtruediv__()" methods. The floor division +operation can be customized using the special "__floordiv__()" and +"__rfloordiv__()" methods. + +The "%" (modulo) operator yields the remainder from the division of +the first argument by the second. The numeric arguments are first +converted to a common type. A zero right argument raises the +"ZeroDivisionError" exception. The arguments may be floating-point +numbers, e.g., "3.14%0.7" equals "0.34" (since "3.14" equals "4*0.7 + +0.34".) The modulo operator always yields a result with the same sign +as its second operand (or zero); the absolute value of the result is +strictly smaller than the absolute value of the second operand [1]. + +The floor division and modulo operators are connected by the following +identity: "x == (x//y)*y + (x%y)". Floor division and modulo are also +connected with the built-in function "divmod()": "divmod(x, y) == +(x//y, x%y)". [2]. + +In addition to performing the modulo operation on numbers, the "%" +operator is also overloaded by string objects to perform old-style +string formatting (also known as interpolation). The syntax for +string formatting is described in the Python Library Reference, +section printf-style String Formatting. + +The *modulo* operation can be customized using the special "__mod__()" +and "__rmod__()" methods. + +The floor division operator, the modulo operator, and the "divmod()" +function are not defined for complex numbers. Instead, convert to a +floating-point number using the "abs()" function if appropriate. + +The "+" (addition) operator yields the sum of its arguments. The +arguments must either both be numbers or both be sequences of the same +type. In the former case, the numbers are converted to a common real +type and then added together. In the latter case, the sequences are +concatenated. 
+ +This operation can be customized using the special "__add__()" and +"__radd__()" methods. + +Changed in version 3.14: If only one operand is a complex number, the +other operand is converted to a floating-point number. + +The "-" (subtraction) operator yields the difference of its arguments. +The numeric arguments are first converted to a common real type. + +This operation can be customized using the special "__sub__()" and +"__rsub__()" methods. + +Changed in version 3.14: If only one operand is a complex number, the +other operand is converted to a floating-point number. +''', + 'bitwise': r'''Binary bitwise operations +************************* + +Each of the three bitwise operations has a different priority level: + + and_expr ::= shift_expr | and_expr "&" shift_expr + xor_expr ::= and_expr | xor_expr "^" and_expr + or_expr ::= xor_expr | or_expr "|" xor_expr + +The "&" operator yields the bitwise AND of its arguments, which must +be integers or one of them must be a custom object overriding +"__and__()" or "__rand__()" special methods. + +The "^" operator yields the bitwise XOR (exclusive OR) of its +arguments, which must be integers or one of them must be a custom +object overriding "__xor__()" or "__rxor__()" special methods. + +The "|" operator yields the bitwise (inclusive) OR of its arguments, +which must be integers or one of them must be a custom object +overriding "__or__()" or "__ror__()" special methods. +''', + 'bltin-code-objects': r'''Code Objects +************ + +Code objects are used by the implementation to represent “pseudo- +compiled†executable Python code such as a function body. They differ +from function objects because they don’t contain a reference to their +global execution environment. Code objects are returned by the built- +in "compile()" function and can be extracted from function objects +through their "__code__" attribute. See also the "code" module. + +Accessing "__code__" raises an auditing event "object.__getattr__" +with arguments "obj" and ""__code__"". + +A code object can be executed or evaluated by passing it (instead of a +source string) to the "exec()" or "eval()" built-in functions. + +See The standard type hierarchy for more information. +''', + 'bltin-ellipsis-object': r'''The Ellipsis Object +******************* + +This object is commonly used by slicing (see Slicings). It supports +no special operations. There is exactly one ellipsis object, named +"Ellipsis" (a built-in name). "type(Ellipsis)()" produces the +"Ellipsis" singleton. + +It is written as "Ellipsis" or "...". +''', + 'bltin-null-object': r'''The Null Object +*************** + +This object is returned by functions that don’t explicitly return a +value. It supports no special operations. There is exactly one null +object, named "None" (a built-in name). "type(None)()" produces the +same singleton. + +It is written as "None". +''', + 'bltin-type-objects': r'''Type Objects +************ + +Type objects represent the various object types. An object’s type is +accessed by the built-in function "type()". There are no special +operations on types. The standard module "types" defines names for +all standard built-in types. + +Types are written like this: "". 
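As an editorial illustration of the code-object and type-object descriptions above (the snippet and names are invented for the example), a code object can be produced with "compile()", executed with "exec()", and retrieved from a function through its "__code__" attribute:

    import types

    code = compile("result = 6 * 7", "<example>", "exec")
    print(type(code) is types.CodeType)   # True: compile() returns a code object

    namespace = {}
    exec(code, namespace)                 # execute the code object
    print(namespace["result"])            # 42

    def f():
        pass

    print(f.__code__.co_name)             # 'f': code object of a function
    print(type(f) is types.FunctionType)  # True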
+''', + 'booleans': r'''Boolean operations +****************** + + or_test ::= and_test | or_test "or" and_test + and_test ::= not_test | and_test "and" not_test + not_test ::= comparison | "not" not_test + +In the context of Boolean operations, and also when expressions are +used by control flow statements, the following values are interpreted +as false: "False", "None", numeric zero of all types, and empty +strings and containers (including strings, tuples, lists, +dictionaries, sets and frozensets). All other values are interpreted +as true. User-defined objects can customize their truth value by +providing a "__bool__()" method. + +The operator "not" yields "True" if its argument is false, "False" +otherwise. + +The expression "x and y" first evaluates *x*; if *x* is false, its +value is returned; otherwise, *y* is evaluated and the resulting value +is returned. + +The expression "x or y" first evaluates *x*; if *x* is true, its value +is returned; otherwise, *y* is evaluated and the resulting value is +returned. + +Note that neither "and" nor "or" restrict the value and type they +return to "False" and "True", but rather return the last evaluated +argument. This is sometimes useful, e.g., if "s" is a string that +should be replaced by a default value if it is empty, the expression +"s or 'foo'" yields the desired value. Because "not" has to create a +new value, it returns a boolean value regardless of the type of its +argument (for example, "not 'foo'" produces "False" rather than "''".) +''', + 'break': r'''The "break" statement +********************* + + break_stmt ::= "break" + +"break" may only occur syntactically nested in a "for" or "while" +loop, but not nested in a function or class definition within that +loop. + +It terminates the nearest enclosing loop, skipping the optional "else" +clause if the loop has one. + +If a "for" loop is terminated by "break", the loop control target +keeps its current value. + +When "break" passes control out of a "try" statement with a "finally" +clause, that "finally" clause is executed before really leaving the +loop. +''', + 'callable-types': r'''Emulating callable objects +************************** + +object.__call__(self[, args...]) + + Called when the instance is “called†as a function; if this method + is defined, "x(arg1, arg2, ...)" roughly translates to + "type(x).__call__(x, arg1, ...)". The "object" class itself does + not provide this method. +''', + 'calls': r'''Calls +***** + +A call calls a callable object (e.g., a *function*) with a possibly +empty series of *arguments*: + + call ::= primary "(" [argument_list [","] | comprehension] ")" + argument_list ::= positional_arguments ["," starred_and_keywords] + ["," keywords_arguments] + | starred_and_keywords ["," keywords_arguments] + | keywords_arguments + positional_arguments ::= positional_item ("," positional_item)* + positional_item ::= assignment_expression | "*" expression + starred_and_keywords ::= ("*" expression | keyword_item) + ("," "*" expression | "," keyword_item)* + keywords_arguments ::= (keyword_item | "**" expression) + ("," keyword_item | "," "**" expression)* + keyword_item ::= identifier "=" expression + +An optional trailing comma may be present after the positional and +keyword arguments but does not affect the semantics. + +The primary must evaluate to a callable object (user-defined +functions, built-in functions, methods of built-in objects, class +objects, methods of class instances, and all objects having a +"__call__()" method are callable). 
All argument expressions are +evaluated before the call is attempted. Please refer to section +Function definitions for the syntax of formal *parameter* lists. + +If keyword arguments are present, they are first converted to +positional arguments, as follows. First, a list of unfilled slots is +created for the formal parameters. If there are N positional +arguments, they are placed in the first N slots. Next, for each +keyword argument, the identifier is used to determine the +corresponding slot (if the identifier is the same as the first formal +parameter name, the first slot is used, and so on). If the slot is +already filled, a "TypeError" exception is raised. Otherwise, the +argument is placed in the slot, filling it (even if the expression is +"None", it fills the slot). When all arguments have been processed, +the slots that are still unfilled are filled with the corresponding +default value from the function definition. (Default values are +calculated, once, when the function is defined; thus, a mutable object +such as a list or dictionary used as default value will be shared by +all calls that don’t specify an argument value for the corresponding +slot; this should usually be avoided.) If there are any unfilled +slots for which no default value is specified, a "TypeError" exception +is raised. Otherwise, the list of filled slots is used as the +argument list for the call. + +**CPython implementation detail:** An implementation may provide +built-in functions whose positional parameters do not have names, even +if they are ‘named’ for the purpose of documentation, and which +therefore cannot be supplied by keyword. In CPython, this is the case +for functions implemented in C that use "PyArg_ParseTuple()" to parse +their arguments. + +If there are more positional arguments than there are formal parameter +slots, a "TypeError" exception is raised, unless a formal parameter +using the syntax "*identifier" is present; in this case, that formal +parameter receives a tuple containing the excess positional arguments +(or an empty tuple if there were no excess positional arguments). + +If any keyword argument does not correspond to a formal parameter +name, a "TypeError" exception is raised, unless a formal parameter +using the syntax "**identifier" is present; in this case, that formal +parameter receives a dictionary containing the excess keyword +arguments (using the keywords as keys and the argument values as +corresponding values), or a (new) empty dictionary if there were no +excess keyword arguments. + +If the syntax "*expression" appears in the function call, "expression" +must evaluate to an *iterable*. Elements from these iterables are +treated as if they were additional positional arguments. For the call +"f(x1, x2, *y, x3, x4)", if *y* evaluates to a sequence *y1*, …, *yM*, +this is equivalent to a call with M+4 positional arguments *x1*, *x2*, +*y1*, …, *yM*, *x3*, *x4*. + +A consequence of this is that although the "*expression" syntax may +appear *after* explicit keyword arguments, it is processed *before* +the keyword arguments (and any "**expression" arguments – see below). +So: + + >>> def f(a, b): + ... print(a, b) + ... + >>> f(b=1, *(2,)) + 2 1 + >>> f(a=1, *(2,)) + Traceback (most recent call last): + File "", line 1, in + TypeError: f() got multiple values for keyword argument 'a' + >>> f(1, *(2,)) + 1 2 + +It is unusual for both keyword arguments and the "*expression" syntax +to be used in the same call, so in practice this confusion does not +often arise. 
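The caveat above about default values being evaluated only once is a common source of surprise; the following editorial sketch (the function names are invented for the example) shows a mutable default being shared across calls, together with the usual "None"-sentinel workaround:

    def append_to(item, bucket=[]):        # one shared list for every call
        bucket.append(item)
        return bucket

    print(append_to(1))   # [1]
    print(append_to(2))   # [1, 2] -- the same list object as before

    def append_to_safe(item, bucket=None): # conventional workaround
        if bucket is None:
            bucket = []
        bucket.append(item)
        return bucket

    print(append_to_safe(1))  # [1]
    print(append_to_safe(2))  # [2]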
+ +If the syntax "**expression" appears in the function call, +"expression" must evaluate to a *mapping*, the contents of which are +treated as additional keyword arguments. If a parameter matching a key +has already been given a value (by an explicit keyword argument, or +from another unpacking), a "TypeError" exception is raised. + +When "**expression" is used, each key in this mapping must be a +string. Each value from the mapping is assigned to the first formal +parameter eligible for keyword assignment whose name is equal to the +key. A key need not be a Python identifier (e.g. ""max-temp °F"" is +acceptable, although it will not match any formal parameter that could +be declared). If there is no match to a formal parameter the key-value +pair is collected by the "**" parameter, if there is one, or if there +is not, a "TypeError" exception is raised. + +Formal parameters using the syntax "*identifier" or "**identifier" +cannot be used as positional argument slots or as keyword argument +names. + +Changed in version 3.5: Function calls accept any number of "*" and +"**" unpackings, positional arguments may follow iterable unpackings +("*"), and keyword arguments may follow dictionary unpackings ("**"). +Originally proposed by **PEP 448**. + +A call always returns some value, possibly "None", unless it raises an +exception. How this value is computed depends on the type of the +callable object. + +If it is— + +a user-defined function: + The code block for the function is executed, passing it the + argument list. The first thing the code block will do is bind the + formal parameters to the arguments; this is described in section + Function definitions. When the code block executes a "return" + statement, this specifies the return value of the function call. + If execution reaches the end of the code block without executing a + "return" statement, the return value is "None". + +a built-in function or method: + The result is up to the interpreter; see Built-in Functions for the + descriptions of built-in functions and methods. + +a class object: + A new instance of that class is returned. + +a class instance method: + The corresponding user-defined function is called, with an argument + list that is one longer than the argument list of the call: the + instance becomes the first argument. + +a class instance: + The class must define a "__call__()" method; the effect is then the + same as if that method was called. +''', + 'class': r'''Class definitions +***************** + +A class definition defines a class object (see section The standard +type hierarchy): + + classdef ::= [decorators] "class" classname [type_params] [inheritance] ":" suite + inheritance ::= "(" [argument_list] ")" + classname ::= identifier + +A class definition is an executable statement. The inheritance list +usually gives a list of base classes (see Metaclasses for more +advanced uses), so each item in the list should evaluate to a class +object which allows subclassing. Classes without an inheritance list +inherit, by default, from the base class "object"; hence, + + class Foo: + pass + +is equivalent to + + class Foo(object): + pass + +The class’s suite is then executed in a new execution frame (see +Naming and binding), using a newly created local namespace and the +original global namespace. (Usually, the suite contains mostly +function definitions.) When the class’s suite finishes execution, its +execution frame is discarded but its local namespace is saved. 
[5] A +class object is then created using the inheritance list for the base +classes and the saved local namespace for the attribute dictionary. +The class name is bound to this class object in the original local +namespace. + +The order in which attributes are defined in the class body is +preserved in the new class’s "__dict__". Note that this is reliable +only right after the class is created and only for classes that were +defined using the definition syntax. + +Class creation can be customized heavily using metaclasses. + +Classes can also be decorated: just like when decorating functions, + + @f1(arg) + @f2 + class Foo: pass + +is roughly equivalent to + + class Foo: pass + Foo = f1(arg)(f2(Foo)) + +The evaluation rules for the decorator expressions are the same as for +function decorators. The result is then bound to the class name. + +Changed in version 3.9: Classes may be decorated with any valid +"assignment_expression". Previously, the grammar was much more +restrictive; see **PEP 614** for details. + +A list of type parameters may be given in square brackets immediately +after the class’s name. This indicates to static type checkers that +the class is generic. At runtime, the type parameters can be retrieved +from the class’s "__type_params__" attribute. See Generic classes for +more. + +Changed in version 3.12: Type parameter lists are new in Python 3.12. + +**Programmer’s note:** Variables defined in the class definition are +class attributes; they are shared by instances. Instance attributes +can be set in a method with "self.name = value". Both class and +instance attributes are accessible through the notation “"self.name"â€, +and an instance attribute hides a class attribute with the same name +when accessed in this way. Class attributes can be used as defaults +for instance attributes, but using mutable values there can lead to +unexpected results. Descriptors can be used to create instance +variables with different implementation details. + +See also: + + **PEP 3115** - Metaclasses in Python 3000 + The proposal that changed the declaration of metaclasses to the + current syntax, and the semantics for how classes with + metaclasses are constructed. + + **PEP 3129** - Class Decorators + The proposal that added class decorators. Function and method + decorators were introduced in **PEP 318**. +''', + 'comparisons': r'''Comparisons +*********** + +Unlike C, all comparison operations in Python have the same priority, +which is lower than that of any arithmetic, shifting or bitwise +operation. Also unlike C, expressions like "a < b < c" have the +interpretation that is conventional in mathematics: + + comparison ::= or_expr (comp_operator or_expr)* + comp_operator ::= "<" | ">" | "==" | ">=" | "<=" | "!=" + | "is" ["not"] | ["not"] "in" + +Comparisons yield boolean values: "True" or "False". Custom *rich +comparison methods* may return non-boolean values. In this case Python +will call "bool()" on such value in boolean contexts. + +Comparisons can be chained arbitrarily, e.g., "x < y <= z" is +equivalent to "x < y and y <= z", except that "y" is evaluated only +once (but in both cases "z" is not evaluated at all when "x < y" is +found to be false). + +Formally, if *a*, *b*, *c*, …, *y*, *z* are expressions and *op1*, +*op2*, …, *opN* are comparison operators, then "a op1 b op2 c ... y +opN z" is equivalent to "a op1 b and b op2 c and ... y opN z", except +that each expression is evaluated at most once. 
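As an editorial illustration of chained comparisons (the helper functions below are invented for the example), each operand is evaluated at most once and evaluation stops as soon as the overall result is known to be false:

    def low():
        print("low evaluated")
        return 1

    def mid():
        print("mid evaluated")
        return 5

    def high():
        print("high evaluated")
        return 10

    print(low() < mid() < high())
    # low evaluated
    # mid evaluated      (only once, even though it appears in both comparisons)
    # high evaluated
    # True

    print(mid() < low() < high())
    # mid evaluated
    # low evaluated
    # False              (high() is never called: 5 < 1 is already false)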
+ +Note that "a op1 b op2 c" doesn’t imply any kind of comparison between +*a* and *c*, so that, e.g., "x < y > z" is perfectly legal (though +perhaps not pretty). + + +Value comparisons +================= + +The operators "<", ">", "==", ">=", "<=", and "!=" compare the values +of two objects. The objects do not need to have the same type. + +Chapter Objects, values and types states that objects have a value (in +addition to type and identity). The value of an object is a rather +abstract notion in Python: For example, there is no canonical access +method for an object’s value. Also, there is no requirement that the +value of an object should be constructed in a particular way, e.g. +comprised of all its data attributes. Comparison operators implement a +particular notion of what the value of an object is. One can think of +them as defining the value of an object indirectly, by means of their +comparison implementation. + +Because all types are (direct or indirect) subtypes of "object", they +inherit the default comparison behavior from "object". Types can +customize their comparison behavior by implementing *rich comparison +methods* like "__lt__()", described in Basic customization. + +The default behavior for equality comparison ("==" and "!=") is based +on the identity of the objects. Hence, equality comparison of +instances with the same identity results in equality, and equality +comparison of instances with different identities results in +inequality. A motivation for this default behavior is the desire that +all objects should be reflexive (i.e. "x is y" implies "x == y"). + +A default order comparison ("<", ">", "<=", and ">=") is not provided; +an attempt raises "TypeError". A motivation for this default behavior +is the lack of a similar invariant as for equality. + +The behavior of the default equality comparison, that instances with +different identities are always unequal, may be in contrast to what +types will need that have a sensible definition of object value and +value-based equality. Such types will need to customize their +comparison behavior, and in fact, a number of built-in types have done +that. + +The following list describes the comparison behavior of the most +important built-in types. + +* Numbers of built-in numeric types (Numeric Types — int, float, + complex) and of the standard library types "fractions.Fraction" and + "decimal.Decimal" can be compared within and across their types, + with the restriction that complex numbers do not support order + comparison. Within the limits of the types involved, they compare + mathematically (algorithmically) correct without loss of precision. + + The not-a-number values "float('NaN')" and "decimal.Decimal('NaN')" + are special. Any ordered comparison of a number to a not-a-number + value is false. A counter-intuitive implication is that not-a-number + values are not equal to themselves. For example, if "x = + float('NaN')", "3 < x", "x < 3" and "x == x" are all false, while "x + != x" is true. This behavior is compliant with IEEE 754. + +* "None" and "NotImplemented" are singletons. **PEP 8** advises that + comparisons for singletons should always be done with "is" or "is + not", never the equality operators. + +* Binary sequences (instances of "bytes" or "bytearray") can be + compared within and across their types. They compare + lexicographically using the numeric values of their elements. 
+ +* Strings (instances of "str") compare lexicographically using the + numerical Unicode code points (the result of the built-in function + "ord()") of their characters. [3] + + Strings and binary sequences cannot be directly compared. + +* Sequences (instances of "tuple", "list", or "range") can be compared + only within each of their types, with the restriction that ranges do + not support order comparison. Equality comparison across these + types results in inequality, and ordering comparison across these + types raises "TypeError". + + Sequences compare lexicographically using comparison of + corresponding elements. The built-in containers typically assume + identical objects are equal to themselves. That lets them bypass + equality tests for identical objects to improve performance and to + maintain their internal invariants. + + Lexicographical comparison between built-in collections works as + follows: + + * For two collections to compare equal, they must be of the same + type, have the same length, and each pair of corresponding + elements must compare equal (for example, "[1,2] == (1,2)" is + false because the type is not the same). + + * Collections that support order comparison are ordered the same as + their first unequal elements (for example, "[1,2,x] <= [1,2,y]" + has the same value as "x <= y"). If a corresponding element does + not exist, the shorter collection is ordered first (for example, + "[1,2] < [1,2,3]" is true). + +* Mappings (instances of "dict") compare equal if and only if they + have equal "(key, value)" pairs. Equality comparison of the keys and + values enforces reflexivity. + + Order comparisons ("<", ">", "<=", and ">=") raise "TypeError". + +* Sets (instances of "set" or "frozenset") can be compared within and + across their types. + + They define order comparison operators to mean subset and superset + tests. Those relations do not define total orderings (for example, + the two sets "{1,2}" and "{2,3}" are not equal, nor subsets of one + another, nor supersets of one another). Accordingly, sets are not + appropriate arguments for functions which depend on total ordering + (for example, "min()", "max()", and "sorted()" produce undefined + results given a list of sets as inputs). + + Comparison of sets enforces reflexivity of its elements. + +* Most other built-in types have no comparison methods implemented, so + they inherit the default comparison behavior. + +User-defined classes that customize their comparison behavior should +follow some consistency rules, if possible: + +* Equality comparison should be reflexive. In other words, identical + objects should compare equal: + + "x is y" implies "x == y" + +* Comparison should be symmetric. In other words, the following + expressions should have the same result: + + "x == y" and "y == x" + + "x != y" and "y != x" + + "x < y" and "y > x" + + "x <= y" and "y >= x" + +* Comparison should be transitive. The following (non-exhaustive) + examples illustrate that: + + "x > y and y > z" implies "x > z" + + "x < y and y <= z" implies "x < z" + +* Inverse comparison should result in the boolean negation. In other + words, the following expressions should have the same result: + + "x == y" and "not x != y" + + "x < y" and "not x >= y" (for total ordering) + + "x > y" and "not x <= y" (for total ordering) + + The last two expressions apply to totally ordered collections (e.g. + to sequences, but not to sets or mappings). See also the + "total_ordering()" decorator. 
+ +* The "hash()" result should be consistent with equality. Objects that + are equal should either have the same hash value, or be marked as + unhashable. + +Python does not enforce these consistency rules. In fact, the +not-a-number values are an example for not following these rules. + + +Membership test operations +========================== + +The operators "in" and "not in" test for membership. "x in s" +evaluates to "True" if *x* is a member of *s*, and "False" otherwise. +"x not in s" returns the negation of "x in s". All built-in sequences +and set types support this as well as dictionary, for which "in" tests +whether the dictionary has a given key. For container types such as +list, tuple, set, frozenset, dict, or collections.deque, the +expression "x in y" is equivalent to "any(x is e or x == e for e in +y)". + +For the string and bytes types, "x in y" is "True" if and only if *x* +is a substring of *y*. An equivalent test is "y.find(x) != -1". +Empty strings are always considered to be a substring of any other +string, so """ in "abc"" will return "True". + +For user-defined classes which define the "__contains__()" method, "x +in y" returns "True" if "y.__contains__(x)" returns a true value, and +"False" otherwise. + +For user-defined classes which do not define "__contains__()" but do +define "__iter__()", "x in y" is "True" if some value "z", for which +the expression "x is z or x == z" is true, is produced while iterating +over "y". If an exception is raised during the iteration, it is as if +"in" raised that exception. + +Lastly, the old-style iteration protocol is tried: if a class defines +"__getitem__()", "x in y" is "True" if and only if there is a non- +negative integer index *i* such that "x is y[i] or x == y[i]", and no +lower integer index raises the "IndexError" exception. (If any other +exception is raised, it is as if "in" raised that exception). + +The operator "not in" is defined to have the inverse truth value of +"in". + + +Identity comparisons +==================== + +The operators "is" and "is not" test for an object’s identity: "x is +y" is true if and only if *x* and *y* are the same object. An +Object’s identity is determined using the "id()" function. "x is not +y" yields the inverse truth value. [4] +''', + 'compound': r'''Compound statements +******************* + +Compound statements contain (groups of) other statements; they affect +or control the execution of those other statements in some way. In +general, compound statements span multiple lines, although in simple +incarnations a whole compound statement may be contained in one line. + +The "if", "while" and "for" statements implement traditional control +flow constructs. "try" specifies exception handlers and/or cleanup +code for a group of statements, while the "with" statement allows the +execution of initialization and finalization code around a block of +code. Function and class definitions are also syntactically compound +statements. + +A compound statement consists of one or more ‘clauses.’ A clause +consists of a header and a ‘suite.’ The clause headers of a +particular compound statement are all at the same indentation level. +Each clause header begins with a uniquely identifying keyword and ends +with a colon. A suite is a group of statements controlled by a +clause. A suite can be one or more semicolon-separated simple +statements on the same line as the header, following the header’s +colon, or it can be one or more indented statements on subsequent +lines. 
Only the latter form of a suite can contain nested compound +statements; the following is illegal, mostly because it wouldn’t be +clear to which "if" clause a following "else" clause would belong: + + if test1: if test2: print(x) + +Also note that the semicolon binds tighter than the colon in this +context, so that in the following example, either all or none of the +"print()" calls are executed: + + if x < y < z: print(x); print(y); print(z) + +Summarizing: + + compound_stmt ::= if_stmt + | while_stmt + | for_stmt + | try_stmt + | with_stmt + | match_stmt + | funcdef + | classdef + | async_with_stmt + | async_for_stmt + | async_funcdef + suite ::= stmt_list NEWLINE | NEWLINE INDENT statement+ DEDENT + statement ::= stmt_list NEWLINE | compound_stmt + stmt_list ::= simple_stmt (";" simple_stmt)* [";"] + +Note that statements always end in a "NEWLINE" possibly followed by a +"DEDENT". Also note that optional continuation clauses always begin +with a keyword that cannot start a statement, thus there are no +ambiguities (the ‘dangling "else"’ problem is solved in Python by +requiring nested "if" statements to be indented). + +The formatting of the grammar rules in the following sections places +each clause on a separate line for clarity. + + +The "if" statement +================== + +The "if" statement is used for conditional execution: + + if_stmt ::= "if" assignment_expression ":" suite + ("elif" assignment_expression ":" suite)* + ["else" ":" suite] + +It selects exactly one of the suites by evaluating the expressions one +by one until one is found to be true (see section Boolean operations +for the definition of true and false); then that suite is executed +(and no other part of the "if" statement is executed or evaluated). +If all expressions are false, the suite of the "else" clause, if +present, is executed. + + +The "while" statement +===================== + +The "while" statement is used for repeated execution as long as an +expression is true: + + while_stmt ::= "while" assignment_expression ":" suite + ["else" ":" suite] + +This repeatedly tests the expression and, if it is true, executes the +first suite; if the expression is false (which may be the first time +it is tested) the suite of the "else" clause, if present, is executed +and the loop terminates. + +A "break" statement executed in the first suite terminates the loop +without executing the "else" clause’s suite. A "continue" statement +executed in the first suite skips the rest of the suite and goes back +to testing the expression. + + +The "for" statement +=================== + +The "for" statement is used to iterate over the elements of a sequence +(such as a string, tuple or list) or other iterable object: + + for_stmt ::= "for" target_list "in" starred_list ":" suite + ["else" ":" suite] + +The "starred_list" expression is evaluated once; it should yield an +*iterable* object. An *iterator* is created for that iterable. The +first item provided by the iterator is then assigned to the target +list using the standard rules for assignments (see Assignment +statements), and the suite is executed. This repeats for each item +provided by the iterator. When the iterator is exhausted, the suite +in the "else" clause, if present, is executed, and the loop +terminates. + +A "break" statement executed in the first suite terminates the loop +without executing the "else" clause’s suite. 
A "continue" statement +executed in the first suite skips the rest of the suite and continues +with the next item, or with the "else" clause if there is no next +item. + +The for-loop makes assignments to the variables in the target list. +This overwrites all previous assignments to those variables including +those made in the suite of the for-loop: + + for i in range(10): + print(i) + i = 5 # this will not affect the for-loop + # because i will be overwritten with the next + # index in the range + +Names in the target list are not deleted when the loop is finished, +but if the sequence is empty, they will not have been assigned to at +all by the loop. Hint: the built-in type "range()" represents +immutable arithmetic sequences of integers. For instance, iterating +"range(3)" successively yields 0, 1, and then 2. + +Changed in version 3.11: Starred elements are now allowed in the +expression list. + + +The "try" statement +=================== + +The "try" statement specifies exception handlers and/or cleanup code +for a group of statements: + + try_stmt ::= try1_stmt | try2_stmt | try3_stmt + try1_stmt ::= "try" ":" suite + ("except" [expression ["as" identifier]] ":" suite)+ + ["else" ":" suite] + ["finally" ":" suite] + try2_stmt ::= "try" ":" suite + ("except" "*" expression ["as" identifier] ":" suite)+ + ["else" ":" suite] + ["finally" ":" suite] + try3_stmt ::= "try" ":" suite + "finally" ":" suite + +Additional information on exceptions can be found in section +Exceptions, and information on using the "raise" statement to generate +exceptions may be found in section The raise statement. + + +"except" clause +--------------- + +The "except" clause(s) specify one or more exception handlers. When no +exception occurs in the "try" clause, no exception handler is +executed. When an exception occurs in the "try" suite, a search for an +exception handler is started. This search inspects the "except" +clauses in turn until one is found that matches the exception. An +expression-less "except" clause, if present, must be last; it matches +any exception. + +For an "except" clause with an expression, the expression must +evaluate to an exception type or a tuple of exception types. The +raised exception matches an "except" clause whose expression evaluates +to the class or a *non-virtual base class* of the exception object, or +to a tuple that contains such a class. + +If no "except" clause matches the exception, the search for an +exception handler continues in the surrounding code and on the +invocation stack. [1] + +If the evaluation of an expression in the header of an "except" clause +raises an exception, the original search for a handler is canceled and +a search starts for the new exception in the surrounding code and on +the call stack (it is treated as if the entire "try" statement raised +the exception). + +When a matching "except" clause is found, the exception is assigned to +the target specified after the "as" keyword in that "except" clause, +if present, and the "except" clause’s suite is executed. All "except" +clauses must have an executable block. When the end of this block is +reached, execution continues normally after the entire "try" +statement. (This means that if two nested handlers exist for the same +exception, and the exception occurs in the "try" clause of the inner +handler, the outer handler will not handle the exception.) + +When an exception has been assigned using "as target", it is cleared +at the end of the "except" clause. 
This is as if + + except E as N: + foo + +was translated to + + except E as N: + try: + foo + finally: + del N + +This means the exception must be assigned to a different name to be +able to refer to it after the "except" clause. Exceptions are cleared +because with the traceback attached to them, they form a reference +cycle with the stack frame, keeping all locals in that frame alive +until the next garbage collection occurs. + +Before an "except" clause’s suite is executed, the exception is stored +in the "sys" module, where it can be accessed from within the body of +the "except" clause by calling "sys.exception()". When leaving an +exception handler, the exception stored in the "sys" module is reset +to its previous value: + + >>> print(sys.exception()) + None + >>> try: + ... raise TypeError + ... except: + ... print(repr(sys.exception())) + ... try: + ... raise ValueError + ... except: + ... print(repr(sys.exception())) + ... print(repr(sys.exception())) + ... + TypeError() + ValueError() + TypeError() + >>> print(sys.exception()) + None + + +"except*" clause +---------------- + +The "except*" clause(s) are used for handling "ExceptionGroup"s. The +exception type for matching is interpreted as in the case of "except", +but in the case of exception groups we can have partial matches when +the type matches some of the exceptions in the group. This means that +multiple "except*" clauses can execute, each handling part of the +exception group. Each clause executes at most once and handles an +exception group of all matching exceptions. Each exception in the +group is handled by at most one "except*" clause, the first that +matches it. + + >>> try: + ... raise ExceptionGroup("eg", + ... [ValueError(1), TypeError(2), OSError(3), OSError(4)]) + ... except* TypeError as e: + ... print(f'caught {type(e)} with nested {e.exceptions}') + ... except* OSError as e: + ... print(f'caught {type(e)} with nested {e.exceptions}') + ... + caught with nested (TypeError(2),) + caught with nested (OSError(3), OSError(4)) + + Exception Group Traceback (most recent call last): + | File "", line 2, in + | ExceptionGroup: eg + +-+---------------- 1 ---------------- + | ValueError: 1 + +------------------------------------ + +Any remaining exceptions that were not handled by any "except*" clause +are re-raised at the end, along with all exceptions that were raised +from within the "except*" clauses. If this list contains more than one +exception to reraise, they are combined into an exception group. + +If the raised exception is not an exception group and its type matches +one of the "except*" clauses, it is caught and wrapped by an exception +group with an empty message string. + + >>> try: + ... raise BlockingIOError + ... except* BlockingIOError as e: + ... print(repr(e)) + ... + ExceptionGroup('', (BlockingIOError())) + +An "except*" clause must have a matching expression; it cannot be +"except*:". Furthermore, this expression cannot contain exception +group types, because that would have ambiguous semantics. + +It is not possible to mix "except" and "except*" in the same "try". +"break", "continue" and "return" cannot appear in an "except*" clause. + + +"else" clause +------------- + +The optional "else" clause is executed if the control flow leaves the +"try" suite, no exception was raised, and no "return", "continue", or +"break" statement was executed. Exceptions in the "else" clause are +not handled by the preceding "except" clauses. 
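+
+For example (an illustrative sketch, not a normative example):
+
+   >>> try:
+   ...     result = 10 // 5
+   ... except ZeroDivisionError:
+   ...     print('division failed')
+   ... else:
+   ...     print('no exception, result is', result)
+   ...
+   no exception, result is 2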
+ + +"finally" clause +---------------- + +If "finally" is present, it specifies a ‘cleanup’ handler. The "try" +clause is executed, including any "except" and "else" clauses. If an +exception occurs in any of the clauses and is not handled, the +exception is temporarily saved. The "finally" clause is executed. If +there is a saved exception it is re-raised at the end of the "finally" +clause. If the "finally" clause raises another exception, the saved +exception is set as the context of the new exception. If the "finally" +clause executes a "return", "break" or "continue" statement, the saved +exception is discarded: + + >>> def f(): + ... try: + ... 1/0 + ... finally: + ... return 42 + ... + >>> f() + 42 + +The exception information is not available to the program during +execution of the "finally" clause. + +When a "return", "break" or "continue" statement is executed in the +"try" suite of a "try"…"finally" statement, the "finally" clause is +also executed ‘on the way out.’ + +The return value of a function is determined by the last "return" +statement executed. Since the "finally" clause always executes, a +"return" statement executed in the "finally" clause will always be the +last one executed: + + >>> def foo(): + ... try: + ... return 'try' + ... finally: + ... return 'finally' + ... + >>> foo() + 'finally' + +Changed in version 3.8: Prior to Python 3.8, a "continue" statement +was illegal in the "finally" clause due to a problem with the +implementation. + + +The "with" statement +==================== + +The "with" statement is used to wrap the execution of a block with +methods defined by a context manager (see section With Statement +Context Managers). This allows common "try"…"except"…"finally" usage +patterns to be encapsulated for convenient reuse. + + with_stmt ::= "with" ( "(" with_stmt_contents ","? ")" | with_stmt_contents ) ":" suite + with_stmt_contents ::= with_item ("," with_item)* + with_item ::= expression ["as" target] + +The execution of the "with" statement with one “item†proceeds as +follows: + +1. The context expression (the expression given in the "with_item") is + evaluated to obtain a context manager. + +2. The context manager’s "__enter__()" is loaded for later use. + +3. The context manager’s "__exit__()" is loaded for later use. + +4. The context manager’s "__enter__()" method is invoked. + +5. If a target was included in the "with" statement, the return value + from "__enter__()" is assigned to it. + + Note: + + The "with" statement guarantees that if the "__enter__()" method + returns without an error, then "__exit__()" will always be + called. Thus, if an error occurs during the assignment to the + target list, it will be treated the same as an error occurring + within the suite would be. See step 7 below. + +6. The suite is executed. + +7. The context manager’s "__exit__()" method is invoked. If an + exception caused the suite to be exited, its type, value, and + traceback are passed as arguments to "__exit__()". Otherwise, three + "None" arguments are supplied. + + If the suite was exited due to an exception, and the return value + from the "__exit__()" method was false, the exception is reraised. + If the return value was true, the exception is suppressed, and + execution continues with the statement following the "with" + statement. + + If the suite was exited for any reason other than an exception, the + return value from "__exit__()" is ignored, and execution proceeds + at the normal location for the kind of exit that was taken. 
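+
+The sequence of steps above can be observed with a minimal context
+manager (an illustrative sketch; the class name "Tracer" is
+hypothetical):
+
+   >>> class Tracer:
+   ...     def __enter__(self):
+   ...         print('enter')
+   ...         return 'value'
+   ...     def __exit__(self, exc_type, exc_value, traceback):
+   ...         print('exit', exc_type)
+   ...         return False   # do not suppress exceptions
+   ...
+   >>> with Tracer() as target:
+   ...     print('suite sees', target)
+   ...
+   enter
+   suite sees value
+   exit None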
+ +The following code: + + with EXPRESSION as TARGET: + SUITE + +is semantically equivalent to: + + manager = (EXPRESSION) + enter = type(manager).__enter__ + exit = type(manager).__exit__ + value = enter(manager) + hit_except = False + + try: + TARGET = value + SUITE + except: + hit_except = True + if not exit(manager, *sys.exc_info()): + raise + finally: + if not hit_except: + exit(manager, None, None, None) + +With more than one item, the context managers are processed as if +multiple "with" statements were nested: + + with A() as a, B() as b: + SUITE + +is semantically equivalent to: + + with A() as a: + with B() as b: + SUITE + +You can also write multi-item context managers in multiple lines if +the items are surrounded by parentheses. For example: + + with ( + A() as a, + B() as b, + ): + SUITE + +Changed in version 3.1: Support for multiple context expressions. + +Changed in version 3.10: Support for using grouping parentheses to +break the statement in multiple lines. + +See also: + + **PEP 343** - The “with†statement + The specification, background, and examples for the Python "with" + statement. + + +The "match" statement +===================== + +Added in version 3.10. + +The match statement is used for pattern matching. Syntax: + + match_stmt ::= 'match' subject_expr ":" NEWLINE INDENT case_block+ DEDENT + subject_expr ::= star_named_expression "," star_named_expressions? + | named_expression + case_block ::= 'case' patterns [guard] ":" block + +Note: + + This section uses single quotes to denote soft keywords. + +Pattern matching takes a pattern as input (following "case") and a +subject value (following "match"). The pattern (which may contain +subpatterns) is matched against the subject value. The outcomes are: + +* A match success or failure (also termed a pattern success or + failure). + +* Possible binding of matched values to a name. The prerequisites for + this are further discussed below. + +The "match" and "case" keywords are soft keywords. + +See also: + + * **PEP 634** – Structural Pattern Matching: Specification + + * **PEP 636** – Structural Pattern Matching: Tutorial + + +Overview +-------- + +Here’s an overview of the logical flow of a match statement: + +1. The subject expression "subject_expr" is evaluated and a resulting + subject value obtained. If the subject expression contains a comma, + a tuple is constructed using the standard rules. + +2. Each pattern in a "case_block" is attempted to match with the + subject value. The specific rules for success or failure are + described below. The match attempt can also bind some or all of the + standalone names within the pattern. The precise pattern binding + rules vary per pattern type and are specified below. **Name + bindings made during a successful pattern match outlive the + executed block and can be used after the match statement**. + + Note: + + During failed pattern matches, some subpatterns may succeed. Do + not rely on bindings being made for a failed match. Conversely, + do not rely on variables remaining unchanged after a failed + match. The exact behavior is dependent on implementation and may + vary. This is an intentional decision made to allow different + implementations to add optimizations. + +3. If the pattern succeeds, the corresponding guard (if present) is + evaluated. In this case all name bindings are guaranteed to have + happened. + + * If the guard evaluates as true or is missing, the "block" inside + "case_block" is executed. 
+ + * Otherwise, the next "case_block" is attempted as described above. + + * If there are no further case blocks, the match statement is + completed. + +Note: + + Users should generally never rely on a pattern being evaluated. + Depending on implementation, the interpreter may cache values or use + other optimizations which skip repeated evaluations. + +A sample match statement: + + >>> flag = False + >>> match (100, 200): + ... case (100, 300): # Mismatch: 200 != 300 + ... print('Case 1') + ... case (100, 200) if flag: # Successful match, but guard fails + ... print('Case 2') + ... case (100, y): # Matches and binds y to 200 + ... print(f'Case 3, y: {y}') + ... case _: # Pattern not attempted + ... print('Case 4, I match anything!') + ... + Case 3, y: 200 + +In this case, "if flag" is a guard. Read more about that in the next +section. + + +Guards +------ + + guard ::= "if" named_expression + +A "guard" (which is part of the "case") must succeed for code inside +the "case" block to execute. It takes the form: "if" followed by an +expression. + +The logical flow of a "case" block with a "guard" follows: + +1. Check that the pattern in the "case" block succeeded. If the + pattern failed, the "guard" is not evaluated and the next "case" + block is checked. + +2. If the pattern succeeded, evaluate the "guard". + + * If the "guard" condition evaluates as true, the case block is + selected. + + * If the "guard" condition evaluates as false, the case block is + not selected. + + * If the "guard" raises an exception during evaluation, the + exception bubbles up. + +Guards are allowed to have side effects as they are expressions. +Guard evaluation must proceed from the first to the last case block, +one at a time, skipping case blocks whose pattern(s) don’t all +succeed. (I.e., guard evaluation must happen in order.) Guard +evaluation must stop once a case block is selected. + + +Irrefutable Case Blocks +----------------------- + +An irrefutable case block is a match-all case block. A match +statement may have at most one irrefutable case block, and it must be +last. + +A case block is considered irrefutable if it has no guard and its +pattern is irrefutable. A pattern is considered irrefutable if we can +prove from its syntax alone that it will always succeed. Only the +following patterns are irrefutable: + +* AS Patterns whose left-hand side is irrefutable + +* OR Patterns containing at least one irrefutable pattern + +* Capture Patterns + +* Wildcard Patterns + +* parenthesized irrefutable patterns + + +Patterns +-------- + +Note: + + This section uses grammar notations beyond standard EBNF: + + * the notation "SEP.RULE+" is shorthand for "RULE (SEP RULE)*" + + * the notation "!RULE" is shorthand for a negative lookahead + assertion + +The top-level syntax for "patterns" is: + + patterns ::= open_sequence_pattern | pattern + pattern ::= as_pattern | or_pattern + closed_pattern ::= | literal_pattern + | capture_pattern + | wildcard_pattern + | value_pattern + | group_pattern + | sequence_pattern + | mapping_pattern + | class_pattern + +The descriptions below will include a description “in simple terms†of +what a pattern does for illustration purposes (credits to Raymond +Hettinger for a document that inspired most of the descriptions). Note +that these descriptions are purely for illustration purposes and **may +not** reflect the underlying implementation. Furthermore, they do not +cover all valid forms. 
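+
+As a combined illustration of several of the pattern kinds described
+below (a sketch for orientation only, not a normative example):
+
+   >>> match {"point": (1, 2)}:
+   ...     case {"point": (int() as x, int() as y)}:
+   ...         print(f'x={x}, y={y}')
+   ...
+   x=1, y=2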
+
+
+OR Patterns
+~~~~~~~~~~~
+
+An OR pattern is two or more patterns separated by vertical bars "|".
+Syntax:
+
+   or_pattern ::= "|".closed_pattern+
+
+Only the final subpattern may be irrefutable, and each subpattern must
+bind the same set of names to avoid ambiguity.
+
+An OR pattern matches each of its subpatterns in turn to the subject
+value, until one succeeds. The OR pattern is then considered
+successful. Otherwise, if none of the subpatterns succeed, the OR
+pattern fails.
+
+In simple terms, "P1 | P2 | ..." will try to match "P1", if it fails
+it will try to match "P2", succeeding immediately if any succeeds,
+failing otherwise.
+
+
+AS Patterns
+~~~~~~~~~~~
+
+An AS pattern matches an OR pattern on the left of the "as" keyword
+against a subject. Syntax:
+
+   as_pattern ::= or_pattern "as" capture_pattern
+
+If the OR pattern fails, the AS pattern fails. Otherwise, the AS
+pattern binds the subject to the name on the right of the as keyword
+and succeeds. "capture_pattern" cannot be a "_".
+
+In simple terms "P as NAME" will match with "P", and on success it
+will set "NAME = <subject>".
+
+
+Literal Patterns
+~~~~~~~~~~~~~~~~
+
+A literal pattern corresponds to most literals in Python. Syntax:
+
+   literal_pattern ::= signed_number
+                       | signed_number "+" NUMBER
+                       | signed_number "-" NUMBER
+                       | strings
+                       | "None"
+                       | "True"
+                       | "False"
+   signed_number   ::= ["-"] NUMBER
+
+The rule "strings" and the token "NUMBER" are defined in the standard
+Python grammar. Triple-quoted strings are supported. Raw strings and
+byte strings are supported. f-strings are not supported.
+
+The forms "signed_number '+' NUMBER" and "signed_number '-' NUMBER"
+are for expressing complex numbers; they require a real number on the
+left and an imaginary number on the right. E.g. "3 + 4j".
+
+In simple terms, "LITERAL" will succeed only if "<subject> ==
+LITERAL". For the singletons "None", "True" and "False", the "is"
+operator is used.
+
+
+Capture Patterns
+~~~~~~~~~~~~~~~~
+
+A capture pattern binds the subject value to a name. Syntax:
+
+   capture_pattern ::= !'_' NAME
+
+A single underscore "_" is not a capture pattern (this is what "!'_'"
+expresses). It is instead treated as a "wildcard_pattern".
+
+In a given pattern, a given name can only be bound once. E.g. "case
+x, x: ..." is invalid while "case [x] | x: ..." is allowed.
+
+Capture patterns always succeed. The binding follows scoping rules
+established by the assignment expression operator in **PEP 572**; the
+name becomes a local variable in the closest containing function scope
+unless there’s an applicable "global" or "nonlocal" statement.
+
+In simple terms "NAME" will always succeed and it will set "NAME =
+<subject>".
+
+
+Wildcard Patterns
+~~~~~~~~~~~~~~~~~
+
+A wildcard pattern always succeeds (matches anything) and binds no
+name. Syntax:
+
+   wildcard_pattern ::= '_'
+
+"_" is a soft keyword within any pattern, but only within patterns.
+It is an identifier, as usual, even within "match" subject
+expressions, "guard"s, and "case" blocks.
+
+In simple terms, "_" will always succeed.
+
+
+Value Patterns
+~~~~~~~~~~~~~~
+
+A value pattern represents a named value in Python. Syntax:
+
+   value_pattern ::= attr
+   attr          ::= name_or_attr "." NAME
+   name_or_attr  ::= attr | NAME
+
+The dotted name in the pattern is looked up using standard Python name
+resolution rules. The pattern succeeds if the value found compares
+equal to the subject value (using the "==" equality operator).
+
+In simple terms "NAME1.NAME2" will succeed only if "<subject> ==
+NAME1.NAME2"
+
+Note:
+
+  If the same value occurs multiple times in the same match statement,
+  the interpreter may cache the first value found and reuse it rather
+  than repeat the same lookup. This cache is strictly tied to a given
+  execution of a given match statement.
+
+
+Group Patterns
+~~~~~~~~~~~~~~
+
+A group pattern allows users to add parentheses around patterns to
+emphasize the intended grouping. Otherwise, it has no additional
+syntax. Syntax:
+
+   group_pattern ::= "(" pattern ")"
+
+In simple terms "(P)" has the same effect as "P".
+
+
+Sequence Patterns
+~~~~~~~~~~~~~~~~~
+
+A sequence pattern contains several subpatterns to be matched against
+sequence elements. The syntax is similar to the unpacking of a list or
+tuple.
+
+   sequence_pattern       ::= "[" [maybe_sequence_pattern] "]"
+                              | "(" [open_sequence_pattern] ")"
+   open_sequence_pattern  ::= maybe_star_pattern "," [maybe_sequence_pattern]
+   maybe_sequence_pattern ::= ",".maybe_star_pattern+ ","?
+   maybe_star_pattern     ::= star_pattern | pattern
+   star_pattern           ::= "*" (capture_pattern | wildcard_pattern)
+
+There is no difference if parentheses or square brackets are used for
+sequence patterns (i.e. "(...)" vs "[...]" ).
+
+Note:
+
+  A single pattern enclosed in parentheses without a trailing comma
+  (e.g. "(3 | 4)") is a group pattern. While a single pattern enclosed
+  in square brackets (e.g. "[3 | 4]") is still a sequence pattern.
+
+At most one star subpattern may be in a sequence pattern. The star
+subpattern may occur in any position. If no star subpattern is
+present, the sequence pattern is a fixed-length sequence pattern;
+otherwise it is a variable-length sequence pattern.
+
+The following is the logical flow for matching a sequence pattern
+against a subject value:
+
+1. If the subject value is not a sequence [2], the sequence pattern
+   fails.
+
+2. If the subject value is an instance of "str", "bytes" or
+   "bytearray" the sequence pattern fails.
+
+3. The subsequent steps depend on whether the sequence pattern is
+   fixed or variable-length.
+
+   If the sequence pattern is fixed-length:
+
+   1. If the length of the subject sequence is not equal to the number
+      of subpatterns, the sequence pattern fails
+
+   2. Subpatterns in the sequence pattern are matched to their
+      corresponding items in the subject sequence from left to right.
+      Matching stops as soon as a subpattern fails. If all
+      subpatterns succeed in matching their corresponding item, the
+      sequence pattern succeeds.
+
+   Otherwise, if the sequence pattern is variable-length:
+
+   1. If the length of the subject sequence is less than the number of
+      non-star subpatterns, the sequence pattern fails.
+
+   2. The leading non-star subpatterns are matched to their
+      corresponding items as for fixed-length sequences.
+
+   3. If the previous step succeeds, the star subpattern matches a
+      list formed of the remaining subject items, excluding the
+      remaining items corresponding to non-star subpatterns following
+      the star subpattern.
+
+   4. Remaining non-star subpatterns are matched to their
+      corresponding subject items, as for a fixed-length sequence.
+
+   Note:
+
+     The length of the subject sequence is obtained via "len()" (i.e.
+     via the "__len__()" protocol). This length may be cached by the
+     interpreter in a similar manner as value patterns.
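+
+For example, a variable-length sequence pattern with a star
+subpattern (an illustrative sketch):
+
+   >>> match [1, 2, 3, 4]:
+   ...     case [first, *rest, last]:
+   ...         print(first, rest, last)
+   ...
+   1 [2, 3] 4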
+
+In simple terms "[P1, P2, P3," … ", P<N>]" matches only if all the
+following happens:
+
+* check "<subject>" is a sequence
+
+* "len(subject) == <N>"
+
+* "P1" matches "<subject>[0]" (note that this match can also bind
+  names)
+
+* "P2" matches "<subject>[1]" (note that this match can also bind
+  names)
+
+* … and so on for the corresponding pattern/element.
+
+
+Mapping Patterns
+~~~~~~~~~~~~~~~~
+
+A mapping pattern contains one or more key-value patterns. The syntax
+is similar to the construction of a dictionary. Syntax:
+
+   mapping_pattern     ::= "{" [items_pattern] "}"
+   items_pattern       ::= ",".key_value_pattern+ ","?
+   key_value_pattern   ::= (literal_pattern | value_pattern) ":" pattern
+                           | double_star_pattern
+   double_star_pattern ::= "**" capture_pattern
+
+At most one double star pattern may be in a mapping pattern. The
+double star pattern must be the last subpattern in the mapping
+pattern.
+
+Duplicate keys in mapping patterns are disallowed. Duplicate literal
+keys will raise a "SyntaxError". Two keys that otherwise have the same
+value will raise a "ValueError" at runtime.
+
+The following is the logical flow for matching a mapping pattern
+against a subject value:
+
+1. If the subject value is not a mapping [3], the mapping pattern
+   fails.
+
+2. If every key given in the mapping pattern is present in the subject
+   mapping, and the pattern for each key matches the corresponding
+   item of the subject mapping, the mapping pattern succeeds.
+
+3. If duplicate keys are detected in the mapping pattern, the pattern
+   is considered invalid. A "SyntaxError" is raised for duplicate
+   literal values; or a "ValueError" for named keys of the same value.
+
+Note:
+
+  Key-value pairs are matched using the two-argument form of the
+  mapping subject’s "get()" method. Matched key-value pairs must
+  already be present in the mapping, and not created on-the-fly via
+  "__missing__()" or "__getitem__()".
+
+In simple terms "{KEY1: P1, KEY2: P2, ... }" matches only if all the
+following happens:
+
+* check "<subject>" is a mapping
+
+* "KEY1 in <subject>"
+
+* "P1" matches "<subject>[KEY1]"
+
+* … and so on for the corresponding KEY/pattern pair.
+
+
+Class Patterns
+~~~~~~~~~~~~~~
+
+A class pattern represents a class and its positional and keyword
+arguments (if any). Syntax:
+
+   class_pattern       ::= name_or_attr "(" [pattern_arguments ","?] ")"
+   pattern_arguments   ::= positional_patterns ["," keyword_patterns]
+                           | keyword_patterns
+   positional_patterns ::= ",".pattern+
+   keyword_patterns    ::= ",".keyword_pattern+
+   keyword_pattern     ::= NAME "=" pattern
+
+The same keyword should not be repeated in class patterns.
+
+The following is the logical flow for matching a class pattern against
+a subject value:
+
+1. If "name_or_attr" is not an instance of the builtin "type", raise
+   "TypeError".
+
+2. If the subject value is not an instance of "name_or_attr" (tested
+   via "isinstance()"), the class pattern fails.
+
+3. If no pattern arguments are present, the pattern succeeds.
+   Otherwise, the subsequent steps depend on whether keyword or
+   positional argument patterns are present.
+
+   For a number of built-in types (specified below), a single
+   positional subpattern is accepted which will match the entire
+   subject; for these types keyword patterns also work as for other
+   types.
+
+   If only keyword patterns are present, they are processed as
+   follows, one by one:
+
+   I. The keyword is looked up as an attribute on the subject.
+
+      * If this raises an exception other than "AttributeError", the
+        exception bubbles up.
+
+      * If this raises "AttributeError", the class pattern has failed.
+
+      * Else, the subpattern associated with the keyword pattern is
+        matched against the subject’s attribute value. If this fails,
+        the class pattern fails; if this succeeds, the match proceeds
+        to the next keyword.
+
+   II. If all keyword patterns succeed, the class pattern succeeds.
+
+   If any positional patterns are present, they are converted to
+   keyword patterns using the "__match_args__" attribute on the class
+   "name_or_attr" before matching:
+
+   I. The equivalent of "getattr(cls, "__match_args__", ())" is
+      called.
+
+      * If this raises an exception, the exception bubbles up.
+
+      * If the returned value is not a tuple, the conversion fails and
+        "TypeError" is raised.
+
+      * If there are more positional patterns than
+        "len(cls.__match_args__)", "TypeError" is raised.
+
+      * Otherwise, positional pattern "i" is converted to a keyword
+        pattern using "__match_args__[i]" as the keyword.
+        "__match_args__[i]" must be a string; if not "TypeError" is
+        raised.
+
+      * If there are duplicate keywords, "TypeError" is raised.
+
+      See also:
+
+        Customizing positional arguments in class pattern matching
+
+   II. Once all positional patterns have been converted to keyword
+       patterns, the match proceeds as if there were only keyword
+       patterns.
+
+   For the following built-in types the handling of positional
+   subpatterns is different:
+
+   * "bool"
+
+   * "bytearray"
+
+   * "bytes"
+
+   * "dict"
+
+   * "float"
+
+   * "frozenset"
+
+   * "int"
+
+   * "list"
+
+   * "set"
+
+   * "str"
+
+   * "tuple"
+
+   These classes accept a single positional argument, and the pattern
+   there is matched against the whole object rather than an attribute.
+   For example "int(0|1)" matches the value "0", but not the value
+   "0.0".
+
+In simple terms "CLS(P1, attr=P2)" matches only if the following
+happens:
+
+* "isinstance(<subject>, CLS)"
+
+* convert "P1" to a keyword pattern using "CLS.__match_args__"
+
+* For each keyword argument "attr=P2":
+
+  * "hasattr(<subject>, "attr")"
+
+  * "P2" matches "<subject>.attr"
+
+* … and so on for the corresponding keyword argument/pattern pair.
+
+See also:
+
+  * **PEP 634** – Structural Pattern Matching: Specification
+
+  * **PEP 636** – Structural Pattern Matching: Tutorial
+
+
+Function definitions
+====================
+
+A function definition defines a user-defined function object (see
+section The standard type hierarchy):
+
+   funcdef                   ::= [decorators] "def" funcname [type_params] "(" [parameter_list] ")"
+                                  ["->" expression] ":" suite
+   decorators                ::= decorator+
+   decorator                 ::= "@" assignment_expression NEWLINE
+   parameter_list            ::= defparameter ("," defparameter)* "," "/" ["," [parameter_list_no_posonly]]
+                                  | parameter_list_no_posonly
+   parameter_list_no_posonly ::= defparameter ("," defparameter)* ["," [parameter_list_starargs]]
+                                  | parameter_list_starargs
+   parameter_list_starargs   ::= "*" [star_parameter] ("," defparameter)* ["," ["**" parameter [","]]]
+                                  | "**" parameter [","]
+   parameter                 ::= identifier [":" expression]
+   star_parameter            ::= identifier [":" ["*"] expression]
+   defparameter              ::= parameter ["=" expression]
+   funcname                  ::= identifier
+
+A function definition is an executable statement. Its execution binds
+the function name in the current local namespace to a function object
+(a wrapper around the executable code for the function). This
+function object contains a reference to the current global namespace
+as the global namespace to be used when the function is called.
+ +The function definition does not execute the function body; this gets +executed only when the function is called. [4] + +A function definition may be wrapped by one or more *decorator* +expressions. Decorator expressions are evaluated when the function is +defined, in the scope that contains the function definition. The +result must be a callable, which is invoked with the function object +as the only argument. The returned value is bound to the function name +instead of the function object. Multiple decorators are applied in +nested fashion. For example, the following code + + @f1(arg) + @f2 + def func(): pass + +is roughly equivalent to + + def func(): pass + func = f1(arg)(f2(func)) + +except that the original function is not temporarily bound to the name +"func". + +Changed in version 3.9: Functions may be decorated with any valid +"assignment_expression". Previously, the grammar was much more +restrictive; see **PEP 614** for details. + +A list of type parameters may be given in square brackets between the +function’s name and the opening parenthesis for its parameter list. +This indicates to static type checkers that the function is generic. +At runtime, the type parameters can be retrieved from the function’s +"__type_params__" attribute. See Generic functions for more. + +Changed in version 3.12: Type parameter lists are new in Python 3.12. + +When one or more *parameters* have the form *parameter* "=" +*expression*, the function is said to have “default parameter values.†+For a parameter with a default value, the corresponding *argument* may +be omitted from a call, in which case the parameter’s default value is +substituted. If a parameter has a default value, all following +parameters up until the “"*"†must also have a default value — this is +a syntactic restriction that is not expressed by the grammar. + +**Default parameter values are evaluated from left to right when the +function definition is executed.** This means that the expression is +evaluated once, when the function is defined, and that the same “pre- +computed†value is used for each call. This is especially important +to understand when a default parameter value is a mutable object, such +as a list or a dictionary: if the function modifies the object (e.g. +by appending an item to a list), the default parameter value is in +effect modified. This is generally not what was intended. A way +around this is to use "None" as the default, and explicitly test for +it in the body of the function, e.g.: + + def whats_on_the_telly(penguin=None): + if penguin is None: + penguin = [] + penguin.append("property of the zoo") + return penguin + +Function call semantics are described in more detail in section Calls. +A function call always assigns values to all parameters mentioned in +the parameter list, either from positional arguments, from keyword +arguments, or from default values. If the form “"*identifier"†is +present, it is initialized to a tuple receiving any excess positional +parameters, defaulting to the empty tuple. If the form +“"**identifier"†is present, it is initialized to a new ordered +mapping receiving any excess keyword arguments, defaulting to a new +empty mapping of the same type. Parameters after “"*"†or +“"*identifier"†are keyword-only parameters and may only be passed by +keyword arguments. Parameters before “"/"†are positional-only +parameters and may only be passed by positional arguments. + +Changed in version 3.8: The "/" function parameter syntax may be used +to indicate positional-only parameters. 
See **PEP 570** for details. + +Parameters may have an *annotation* of the form “": expression"†+following the parameter name. Any parameter may have an annotation, +even those of the form "*identifier" or "**identifier". (As a special +case, parameters of the form "*identifier" may have an annotation “": +*expression"â€.) Functions may have “return†annotation of the form +“"-> expression"†after the parameter list. These annotations can be +any valid Python expression. The presence of annotations does not +change the semantics of a function. See Annotations for more +information on annotations. + +Changed in version 3.11: Parameters of the form “"*identifier"†may +have an annotation “": *expression"â€. See **PEP 646**. + +It is also possible to create anonymous functions (functions not bound +to a name), for immediate use in expressions. This uses lambda +expressions, described in section Lambdas. Note that the lambda +expression is merely a shorthand for a simplified function definition; +a function defined in a “"def"†statement can be passed around or +assigned to another name just like a function defined by a lambda +expression. The “"def"†form is actually more powerful since it +allows the execution of multiple statements and annotations. + +**Programmer’s note:** Functions are first-class objects. A “"def"†+statement executed inside a function definition defines a local +function that can be returned or passed around. Free variables used +in the nested function can access the local variables of the function +containing the def. See section Naming and binding for details. + +See also: + + **PEP 3107** - Function Annotations + The original specification for function annotations. + + **PEP 484** - Type Hints + Definition of a standard meaning for annotations: type hints. + + **PEP 526** - Syntax for Variable Annotations + Ability to type hint variable declarations, including class + variables and instance variables. + + **PEP 563** - Postponed Evaluation of Annotations + Support for forward references within annotations by preserving + annotations in a string form at runtime instead of eager + evaluation. + + **PEP 318** - Decorators for Functions and Methods + Function and method decorators were introduced. Class decorators + were introduced in **PEP 3129**. + + +Class definitions +================= + +A class definition defines a class object (see section The standard +type hierarchy): + + classdef ::= [decorators] "class" classname [type_params] [inheritance] ":" suite + inheritance ::= "(" [argument_list] ")" + classname ::= identifier + +A class definition is an executable statement. The inheritance list +usually gives a list of base classes (see Metaclasses for more +advanced uses), so each item in the list should evaluate to a class +object which allows subclassing. Classes without an inheritance list +inherit, by default, from the base class "object"; hence, + + class Foo: + pass + +is equivalent to + + class Foo(object): + pass + +The class’s suite is then executed in a new execution frame (see +Naming and binding), using a newly created local namespace and the +original global namespace. (Usually, the suite contains mostly +function definitions.) When the class’s suite finishes execution, its +execution frame is discarded but its local namespace is saved. [5] A +class object is then created using the inheritance list for the base +classes and the saved local namespace for the attribute dictionary. +The class name is bound to this class object in the original local +namespace. 
+ +The order in which attributes are defined in the class body is +preserved in the new class’s "__dict__". Note that this is reliable +only right after the class is created and only for classes that were +defined using the definition syntax. + +Class creation can be customized heavily using metaclasses. + +Classes can also be decorated: just like when decorating functions, + + @f1(arg) + @f2 + class Foo: pass + +is roughly equivalent to + + class Foo: pass + Foo = f1(arg)(f2(Foo)) + +The evaluation rules for the decorator expressions are the same as for +function decorators. The result is then bound to the class name. + +Changed in version 3.9: Classes may be decorated with any valid +"assignment_expression". Previously, the grammar was much more +restrictive; see **PEP 614** for details. + +A list of type parameters may be given in square brackets immediately +after the class’s name. This indicates to static type checkers that +the class is generic. At runtime, the type parameters can be retrieved +from the class’s "__type_params__" attribute. See Generic classes for +more. + +Changed in version 3.12: Type parameter lists are new in Python 3.12. + +**Programmer’s note:** Variables defined in the class definition are +class attributes; they are shared by instances. Instance attributes +can be set in a method with "self.name = value". Both class and +instance attributes are accessible through the notation “"self.name"â€, +and an instance attribute hides a class attribute with the same name +when accessed in this way. Class attributes can be used as defaults +for instance attributes, but using mutable values there can lead to +unexpected results. Descriptors can be used to create instance +variables with different implementation details. + +See also: + + **PEP 3115** - Metaclasses in Python 3000 + The proposal that changed the declaration of metaclasses to the + current syntax, and the semantics for how classes with + metaclasses are constructed. + + **PEP 3129** - Class Decorators + The proposal that added class decorators. Function and method + decorators were introduced in **PEP 318**. + + +Coroutines +========== + +Added in version 3.5. + + +Coroutine function definition +----------------------------- + + async_funcdef ::= [decorators] "async" "def" funcname "(" [parameter_list] ")" + ["->" expression] ":" suite + +Execution of Python coroutines can be suspended and resumed at many +points (see *coroutine*). "await" expressions, "async for" and "async +with" can only be used in the body of a coroutine function. + +Functions defined with "async def" syntax are always coroutine +functions, even if they do not contain "await" or "async" keywords. + +It is a "SyntaxError" to use a "yield from" expression inside the body +of a coroutine function. + +An example of a coroutine function: + + async def func(param1, param2): + do_stuff() + await some_coroutine() + +Changed in version 3.7: "await" and "async" are now keywords; +previously they were only treated as such inside the body of a +coroutine function. + + +The "async for" statement +------------------------- + + async_for_stmt ::= "async" for_stmt + +An *asynchronous iterable* provides an "__aiter__" method that +directly returns an *asynchronous iterator*, which can call +asynchronous code in its "__anext__" method. + +The "async for" statement allows convenient iteration over +asynchronous iterables. 
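+
+A minimal asynchronous iterator might look as follows (an
+illustrative sketch; the class name "Countdown" is hypothetical, and
+"main()" could be driven with "asyncio.run(main())"):
+
+   class Countdown:
+       def __init__(self, start):
+           self.current = start
+
+       def __aiter__(self):
+           return self
+
+       async def __anext__(self):
+           if self.current <= 0:
+               raise StopAsyncIteration
+           self.current -= 1
+           return self.current + 1
+
+   async def main():
+       async for number in Countdown(3):
+           print(number)   # prints 3, then 2, then 1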
+ +The following code: + + async for TARGET in ITER: + SUITE + else: + SUITE2 + +Is semantically equivalent to: + + iter = (ITER) + iter = type(iter).__aiter__(iter) + running = True + + while running: + try: + TARGET = await type(iter).__anext__(iter) + except StopAsyncIteration: + running = False + else: + SUITE + else: + SUITE2 + +See also "__aiter__()" and "__anext__()" for details. + +It is a "SyntaxError" to use an "async for" statement outside the body +of a coroutine function. + + +The "async with" statement +-------------------------- + + async_with_stmt ::= "async" with_stmt + +An *asynchronous context manager* is a *context manager* that is able +to suspend execution in its *enter* and *exit* methods. + +The following code: + + async with EXPRESSION as TARGET: + SUITE + +is semantically equivalent to: + + manager = (EXPRESSION) + aenter = type(manager).__aenter__ + aexit = type(manager).__aexit__ + value = await aenter(manager) + hit_except = False + + try: + TARGET = value + SUITE + except: + hit_except = True + if not await aexit(manager, *sys.exc_info()): + raise + finally: + if not hit_except: + await aexit(manager, None, None, None) + +See also "__aenter__()" and "__aexit__()" for details. + +It is a "SyntaxError" to use an "async with" statement outside the +body of a coroutine function. + +See also: + + **PEP 492** - Coroutines with async and await syntax + The proposal that made coroutines a proper standalone concept in + Python, and added supporting syntax. + + +Type parameter lists +==================== + +Added in version 3.12. + +Changed in version 3.13: Support for default values was added (see +**PEP 696**). + + type_params ::= "[" type_param ("," type_param)* "]" + type_param ::= typevar | typevartuple | paramspec + typevar ::= identifier (":" expression)? ("=" expression)? + typevartuple ::= "*" identifier ("=" expression)? + paramspec ::= "**" identifier ("=" expression)? + +Functions (including coroutines), classes and type aliases may contain +a type parameter list: + + def max[T](args: list[T]) -> T: + ... + + async def amax[T](args: list[T]) -> T: + ... + + class Bag[T]: + def __iter__(self) -> Iterator[T]: + ... + + def add(self, arg: T) -> None: + ... + + type ListOrSet[T] = list[T] | set[T] + +Semantically, this indicates that the function, class, or type alias +is generic over a type variable. This information is primarily used by +static type checkers, and at runtime, generic objects behave much like +their non-generic counterparts. + +Type parameters are declared in square brackets ("[]") immediately +after the name of the function, class, or type alias. The type +parameters are accessible within the scope of the generic object, but +not elsewhere. Thus, after a declaration "def func[T](): pass", the +name "T" is not available in the module scope. Below, the semantics of +generic objects are described with more precision. The scope of type +parameters is modeled with a special function (technically, an +annotation scope) that wraps the creation of the generic object. + +Generic functions, classes, and type aliases have a "__type_params__" +attribute listing their type parameters. + +Type parameters come in three kinds: + +* "typing.TypeVar", introduced by a plain name (e.g., "T"). + Semantically, this represents a single type to a type checker. + +* "typing.TypeVarTuple", introduced by a name prefixed with a single + asterisk (e.g., "*Ts"). Semantically, this stands for a tuple of any + number of types. 
+ +* "typing.ParamSpec", introduced by a name prefixed with two asterisks + (e.g., "**P"). Semantically, this stands for the parameters of a + callable. + +"typing.TypeVar" declarations can define *bounds* and *constraints* +with a colon (":") followed by an expression. A single expression +after the colon indicates a bound (e.g. "T: int"). Semantically, this +means that the "typing.TypeVar" can only represent types that are a +subtype of this bound. A parenthesized tuple of expressions after the +colon indicates a set of constraints (e.g. "T: (str, bytes)"). Each +member of the tuple should be a type (again, this is not enforced at +runtime). Constrained type variables can only take on one of the types +in the list of constraints. + +For "typing.TypeVar"s declared using the type parameter list syntax, +the bound and constraints are not evaluated when the generic object is +created, but only when the value is explicitly accessed through the +attributes "__bound__" and "__constraints__". To accomplish this, the +bounds or constraints are evaluated in a separate annotation scope. + +"typing.TypeVarTuple"s and "typing.ParamSpec"s cannot have bounds or +constraints. + +All three flavors of type parameters can also have a *default value*, +which is used when the type parameter is not explicitly provided. This +is added by appending a single equals sign ("=") followed by an +expression. Like the bounds and constraints of type variables, the +default value is not evaluated when the object is created, but only +when the type parameter’s "__default__" attribute is accessed. To this +end, the default value is evaluated in a separate annotation scope. If +no default value is specified for a type parameter, the "__default__" +attribute is set to the special sentinel object "typing.NoDefault". + +The following example indicates the full set of allowed type parameter +declarations: + + def overly_generic[ + SimpleTypeVar, + TypeVarWithDefault = int, + TypeVarWithBound: int, + TypeVarWithConstraints: (str, bytes), + *SimpleTypeVarTuple = (int, float), + **SimpleParamSpec = (str, bytearray), + ]( + a: SimpleTypeVar, + b: TypeVarWithDefault, + c: TypeVarWithBound, + d: Callable[SimpleParamSpec, TypeVarWithConstraints], + *e: SimpleTypeVarTuple, + ): ... + + +Generic functions +----------------- + +Generic functions are declared as follows: + + def func[T](arg: T): ... + +This syntax is equivalent to: + + annotation-def TYPE_PARAMS_OF_func(): + T = typing.TypeVar("T") + def func(arg: T): ... + func.__type_params__ = (T,) + return func + func = TYPE_PARAMS_OF_func() + +Here "annotation-def" indicates an annotation scope, which is not +actually bound to any name at runtime. (One other liberty is taken in +the translation: the syntax does not go through attribute access on +the "typing" module, but creates an instance of "typing.TypeVar" +directly.) + +The annotations of generic functions are evaluated within the +annotation scope used for declaring the type parameters, but the +function’s defaults and decorators are not. + +The following example illustrates the scoping rules for these cases, +as well as for additional flavors of type parameters: + + @decorator + def func[T: int, *Ts, **P](*args: *Ts, arg: Callable[P, T] = some_default): + ... + +Except for the lazy evaluation of the "TypeVar" bound, this is +equivalent to: + + DEFAULT_OF_arg = some_default + + annotation-def TYPE_PARAMS_OF_func(): + + annotation-def BOUND_OF_T(): + return int + # In reality, BOUND_OF_T() is evaluated only on demand. 
+ T = typing.TypeVar("T", bound=BOUND_OF_T()) + + Ts = typing.TypeVarTuple("Ts") + P = typing.ParamSpec("P") + + def func(*args: *Ts, arg: Callable[P, T] = DEFAULT_OF_arg): + ... + + func.__type_params__ = (T, Ts, P) + return func + func = decorator(TYPE_PARAMS_OF_func()) + +The capitalized names like "DEFAULT_OF_arg" are not actually bound at +runtime. + + +Generic classes +--------------- + +Generic classes are declared as follows: + + class Bag[T]: ... + +This syntax is equivalent to: + + annotation-def TYPE_PARAMS_OF_Bag(): + T = typing.TypeVar("T") + class Bag(typing.Generic[T]): + __type_params__ = (T,) + ... + return Bag + Bag = TYPE_PARAMS_OF_Bag() + +Here again "annotation-def" (not a real keyword) indicates an +annotation scope, and the name "TYPE_PARAMS_OF_Bag" is not actually +bound at runtime. + +Generic classes implicitly inherit from "typing.Generic". The base +classes and keyword arguments of generic classes are evaluated within +the type scope for the type parameters, and decorators are evaluated +outside that scope. This is illustrated by this example: + + @decorator + class Bag(Base[T], arg=T): ... + +This is equivalent to: + + annotation-def TYPE_PARAMS_OF_Bag(): + T = typing.TypeVar("T") + class Bag(Base[T], typing.Generic[T], arg=T): + __type_params__ = (T,) + ... + return Bag + Bag = decorator(TYPE_PARAMS_OF_Bag()) + + +Generic type aliases +-------------------- + +The "type" statement can also be used to create a generic type alias: + + type ListOrSet[T] = list[T] | set[T] + +Except for the lazy evaluation of the value, this is equivalent to: + + annotation-def TYPE_PARAMS_OF_ListOrSet(): + T = typing.TypeVar("T") + + annotation-def VALUE_OF_ListOrSet(): + return list[T] | set[T] + # In reality, the value is lazily evaluated + return typing.TypeAliasType("ListOrSet", VALUE_OF_ListOrSet(), type_params=(T,)) + ListOrSet = TYPE_PARAMS_OF_ListOrSet() + +Here, "annotation-def" (not a real keyword) indicates an annotation +scope. The capitalized names like "TYPE_PARAMS_OF_ListOrSet" are not +actually bound at runtime. + + +Annotations +=========== + +Changed in version 3.14: Annotations are now lazily evaluated by +default. + +Variables and function parameters may carry *annotations*, created by +adding a colon after the name, followed by an expression: + + x: annotation = 1 + def f(param: annotation): ... + +Functions may also carry a return annotation following an arrow: + + def f() -> annotation: ... + +Annotations are conventionally used for *type hints*, but this is not +enforced by the language, and in general annotations may contain +arbitrary expressions. The presence of annotations does not change the +runtime semantics of the code, except if some mechanism is used that +introspects and uses the annotations (such as "dataclasses" or +"functools.singledispatch()"). + +By default, annotations are lazily evaluated in a annotation scope. +This means that they are not evaluated when the code containing the +annotation is evaluated. Instead, the interpreter saves information +that can be used to evaluate the annotation later if requested. The +"annotationlib" module provides tools for evaluating annotations. + +If the future statement "from __future__ import annotations" is +present, all annotations are instead stored as strings: + + >>> from __future__ import annotations + >>> def f(param: annotation): ... 
+ >>> f.__annotations__ + {'param': 'annotation'} + +-[ Footnotes ]- + +[1] The exception is propagated to the invocation stack unless there + is a "finally" clause which happens to raise another exception. + That new exception causes the old one to be lost. + +[2] In pattern matching, a sequence is defined as one of the + following: + + * a class that inherits from "collections.abc.Sequence" + + * a Python class that has been registered as + "collections.abc.Sequence" + + * a builtin class that has its (CPython) "Py_TPFLAGS_SEQUENCE" bit + set + + * a class that inherits from any of the above + + The following standard library classes are sequences: + + * "array.array" + + * "collections.deque" + + * "list" + + * "memoryview" + + * "range" + + * "tuple" + + Note: + + Subject values of type "str", "bytes", and "bytearray" do not + match sequence patterns. + +[3] In pattern matching, a mapping is defined as one of the following: + + * a class that inherits from "collections.abc.Mapping" + + * a Python class that has been registered as + "collections.abc.Mapping" + + * a builtin class that has its (CPython) "Py_TPFLAGS_MAPPING" bit + set + + * a class that inherits from any of the above + + The standard library classes "dict" and "types.MappingProxyType" + are mappings. + +[4] A string literal appearing as the first statement in the function + body is transformed into the function’s "__doc__" attribute and + therefore the function’s *docstring*. + +[5] A string literal appearing as the first statement in the class + body is transformed into the namespace’s "__doc__" item and + therefore the class’s *docstring*. +''', + 'context-managers': r'''With Statement Context Managers +******************************* + +A *context manager* is an object that defines the runtime context to +be established when executing a "with" statement. The context manager +handles the entry into, and the exit from, the desired runtime context +for the execution of the block of code. Context managers are normally +invoked using the "with" statement (described in section The with +statement), but can also be used by directly invoking their methods. + +Typical uses of context managers include saving and restoring various +kinds of global state, locking and unlocking resources, closing opened +files, etc. + +For more information on context managers, see Context Manager Types. +The "object" class itself does not provide the context manager +methods. + +object.__enter__(self) + + Enter the runtime context related to this object. The "with" + statement will bind this method’s return value to the target(s) + specified in the "as" clause of the statement, if any. + +object.__exit__(self, exc_type, exc_value, traceback) + + Exit the runtime context related to this object. The parameters + describe the exception that caused the context to be exited. If the + context was exited without an exception, all three arguments will + be "None". + + If an exception is supplied, and the method wishes to suppress the + exception (i.e., prevent it from being propagated), it should + return a true value. Otherwise, the exception will be processed + normally upon exit from this method. + + Note that "__exit__()" methods should not reraise the passed-in + exception; this is the caller’s responsibility. + +See also: + + **PEP 343** - The “with†statement + The specification, background, and examples for the Python "with" + statement. 
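+
+As an illustrative sketch (not part of the original text; the class
+name "Tracked" is a placeholder), a minimal class-based context
+manager defining the two methods described above:
+
+   class Tracked:
+       # Minimal sketch of a context manager used with "with".
+       def __enter__(self):
+           print("entering")
+           return self            # bound to the "as" target, if any
+
+       def __exit__(self, exc_type, exc_value, traceback):
+           print("exiting")
+           return False           # a false value: do not suppress exceptions
+
+   with Tracked() as t:
+       print("inside the block")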
+''', + 'continue': r'''The "continue" statement +************************ + + continue_stmt ::= "continue" + +"continue" may only occur syntactically nested in a "for" or "while" +loop, but not nested in a function or class definition within that +loop. It continues with the next cycle of the nearest enclosing loop. + +When "continue" passes control out of a "try" statement with a +"finally" clause, that "finally" clause is executed before really +starting the next loop cycle. +''', + 'conversions': r'''Arithmetic conversions +********************** + +When a description of an arithmetic operator below uses the phrase +“the numeric arguments are converted to a common real typeâ€, this +means that the operator implementation for built-in types works as +follows: + +* If both arguments are complex numbers, no conversion is performed; + +* if either argument is a complex or a floating-point number, the + other is converted to a floating-point number; + +* otherwise, both must be integers and no conversion is necessary. + +Some additional rules apply for certain operators (e.g., a string as a +left argument to the ‘%’ operator). Extensions must define their own +conversion behavior. +''', + 'customization': r'''Basic customization +******************* + +object.__new__(cls[, ...]) + + Called to create a new instance of class *cls*. "__new__()" is a + static method (special-cased so you need not declare it as such) + that takes the class of which an instance was requested as its + first argument. The remaining arguments are those passed to the + object constructor expression (the call to the class). The return + value of "__new__()" should be the new object instance (usually an + instance of *cls*). + + Typical implementations create a new instance of the class by + invoking the superclass’s "__new__()" method using + "super().__new__(cls[, ...])" with appropriate arguments and then + modifying the newly created instance as necessary before returning + it. + + If "__new__()" is invoked during object construction and it returns + an instance of *cls*, then the new instance’s "__init__()" method + will be invoked like "__init__(self[, ...])", where *self* is the + new instance and the remaining arguments are the same as were + passed to the object constructor. + + If "__new__()" does not return an instance of *cls*, then the new + instance’s "__init__()" method will not be invoked. + + "__new__()" is intended mainly to allow subclasses of immutable + types (like int, str, or tuple) to customize instance creation. It + is also commonly overridden in custom metaclasses in order to + customize class creation. + +object.__init__(self[, ...]) + + Called after the instance has been created (by "__new__()"), but + before it is returned to the caller. The arguments are those + passed to the class constructor expression. If a base class has an + "__init__()" method, the derived class’s "__init__()" method, if + any, must explicitly call it to ensure proper initialization of the + base class part of the instance; for example: + "super().__init__([args...])". + + Because "__new__()" and "__init__()" work together in constructing + objects ("__new__()" to create it, and "__init__()" to customize + it), no non-"None" value may be returned by "__init__()"; doing so + will cause a "TypeError" to be raised at runtime. + +object.__del__(self) + + Called when the instance is about to be destroyed. This is also + called a finalizer or (improperly) a destructor. 
If a base class + has a "__del__()" method, the derived class’s "__del__()" method, + if any, must explicitly call it to ensure proper deletion of the + base class part of the instance. + + It is possible (though not recommended!) for the "__del__()" method + to postpone destruction of the instance by creating a new reference + to it. This is called object *resurrection*. It is + implementation-dependent whether "__del__()" is called a second + time when a resurrected object is about to be destroyed; the + current *CPython* implementation only calls it once. + + It is not guaranteed that "__del__()" methods are called for + objects that still exist when the interpreter exits. + "weakref.finalize" provides a straightforward way to register a + cleanup function to be called when an object is garbage collected. + + Note: + + "del x" doesn’t directly call "x.__del__()" — the former + decrements the reference count for "x" by one, and the latter is + only called when "x"’s reference count reaches zero. + + **CPython implementation detail:** It is possible for a reference + cycle to prevent the reference count of an object from going to + zero. In this case, the cycle will be later detected and deleted + by the *cyclic garbage collector*. A common cause of reference + cycles is when an exception has been caught in a local variable. + The frame’s locals then reference the exception, which references + its own traceback, which references the locals of all frames caught + in the traceback. + + See also: Documentation for the "gc" module. + + Warning: + + Due to the precarious circumstances under which "__del__()" + methods are invoked, exceptions that occur during their execution + are ignored, and a warning is printed to "sys.stderr" instead. + In particular: + + * "__del__()" can be invoked when arbitrary code is being + executed, including from any arbitrary thread. If "__del__()" + needs to take a lock or invoke any other blocking resource, it + may deadlock as the resource may already be taken by the code + that gets interrupted to execute "__del__()". + + * "__del__()" can be executed during interpreter shutdown. As a + consequence, the global variables it needs to access (including + other modules) may already have been deleted or set to "None". + Python guarantees that globals whose name begins with a single + underscore are deleted from their module before other globals + are deleted; if no other references to such globals exist, this + may help in assuring that imported modules are still available + at the time when the "__del__()" method is called. + +object.__repr__(self) + + Called by the "repr()" built-in function to compute the “official†+ string representation of an object. If at all possible, this + should look like a valid Python expression that could be used to + recreate an object with the same value (given an appropriate + environment). If this is not possible, a string of the form + "<...some useful description...>" should be returned. The return + value must be a string object. If a class defines "__repr__()" but + not "__str__()", then "__repr__()" is also used when an “informal†+ string representation of instances of that class is required. + + This is typically used for debugging, so it is important that the + representation is information-rich and unambiguous. A default + implementation is provided by the "object" class itself. 
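+
+ For illustration (this sketch is not part of the reference text; the
+ class name "Point" is a placeholder), a minimal "__repr__()"
+ implementation, together with the "str()" fallback described below:
+
+    class Point:
+        def __init__(self, x, y):
+            self.x, self.y = x, y
+
+        def __repr__(self):
+            # Unambiguous representation, ideally eval()-able.
+            return f"Point({self.x!r}, {self.y!r})"
+
+    p = Point(1, 2)
+    print(repr(p))   # Point(1, 2)
+    print(str(p))    # also Point(1, 2): str() falls back to __repr__()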
+
+object.__str__(self)
+
+ Called by "str(object)", the default "__format__()" implementation,
+ and the built-in function "print()", to compute the “informal” or
+ nicely printable string representation of an object. The return
+ value must be a str object.
+
+ This method differs from "object.__repr__()" in that there is no
+ expectation that "__str__()" return a valid Python expression: a
+ more convenient or concise representation can be used.
+
+ The default implementation defined by the built-in type "object"
+ calls "object.__repr__()".
+
+object.__bytes__(self)
+
+ Called by bytes to compute a byte-string representation of an
+ object. This should return a "bytes" object. The "object" class
+ itself does not provide this method.
+
+object.__format__(self, format_spec)
+
+ Called by the "format()" built-in function, and by extension,
+ evaluation of formatted string literals and the "str.format()"
+ method, to produce a “formatted” string representation of an
+ object. The *format_spec* argument is a string that contains a
+ description of the formatting options desired. The interpretation
+ of the *format_spec* argument is up to the type implementing
+ "__format__()", however most classes will either delegate
+ formatting to one of the built-in types, or use a similar
+ formatting option syntax.
+
+ See Format Specification Mini-Language for a description of the
+ standard formatting syntax.
+
+ The return value must be a string object.
+
+ The default implementation by the "object" class should be given an
+ empty *format_spec* string. It delegates to "__str__()".
+
+ Changed in version 3.4: The __format__ method of "object" itself
+ raises a "TypeError" if passed any non-empty string.
+
+ Changed in version 3.7: "object.__format__(x, '')" is now
+ equivalent to "str(x)" rather than "format(str(x), '')".
+
+object.__lt__(self, other)
+object.__le__(self, other)
+object.__eq__(self, other)
+object.__ne__(self, other)
+object.__gt__(self, other)
+object.__ge__(self, other)
+
+ These are the so-called “rich comparison” methods. The
+ correspondence between operator symbols and method names is as
+ follows: "x<y" calls "x.__lt__(y)", "x<=y" calls "x.__le__(y)",
+ "x==y" calls "x.__eq__(y)", "x!=y" calls "x.__ne__(y)", "x>y" calls
+ "x.__gt__(y)", and "x>=y" calls "x.__ge__(y)".
+
+ A rich comparison method may return the singleton "NotImplemented"
+ if it does not implement the operation for a given pair of
+ arguments. By convention, "False" and "True" are returned for a
+ successful comparison. However, these methods can return any value,
+ so if the comparison operator is used in a Boolean context (e.g.,
+ in the condition of an "if" statement), Python will call "bool()"
+ on the value to determine if the result is true or false.
+
+ By default, "object" implements "__eq__()" by using "is", returning
+ "NotImplemented" in the case of a false comparison: "True if x is y
+ else NotImplemented". For "__ne__()", by default it delegates to
+ "__eq__()" and inverts the result unless it is "NotImplemented".
+ There are no other implied relationships among the comparison
+ operators or default implementations; for example, the truth of
+ "(x<y or x==y)" does not imply "x<=y".
+
+object.__hash__(self)
+
+ Called by built-in function "hash()" and for operations on members
+ of hashed collections including "set", "frozenset", and "dict".
+ The "__hash__()" method should return an integer.
+
+ If a class that does not override "__eq__()" wishes to suppress
+ hash support, it should include "__hash__ = None" in the class
+ definition. A class which defines its own "__hash__()" that
+ explicitly raises a "TypeError" would be incorrectly identified as
+ hashable by an "isinstance(obj, collections.abc.Hashable)" call.
+
+ Note:
+
+ By default, the "__hash__()" values of str and bytes objects are
+ “salted” with an unpredictable random value. 
Although they
+ remain constant within an individual Python process, they are not
+ predictable between repeated invocations of Python.
+
+ This is intended to provide protection against a denial-of-service
+ caused by carefully chosen inputs that exploit the worst case
+ performance of a dict insertion, *O*(*n*^2) complexity. See
+ http://ocert.org/advisories/ocert-2011-003.html for details.
+
+ Changing hash values affects the iteration order of sets. Python
+ has never made guarantees about this ordering (and it typically
+ varies between 32-bit and 64-bit builds).
+
+ See also "PYTHONHASHSEED".
+
+ Changed in version 3.3: Hash randomization is enabled by default.
+
+object.__bool__(self)
+
+ Called to implement truth value testing and the built-in operation
+ "bool()"; should return "False" or "True". When this method is not
+ defined, "__len__()" is called, if it is defined, and the object is
+ considered true if its result is nonzero. If a class defines
+ neither "__len__()" nor "__bool__()" (which is true of the "object"
+ class itself), all its instances are considered true.
+''',
+ 'debugger': r'''"pdb" — The Python Debugger
+***************************
+
+**Source code:** Lib/pdb.py
+
+======================================================================
+
+The module "pdb" defines an interactive source code debugger for
+Python programs. It supports setting (conditional) breakpoints and
+single stepping at the source line level, inspection of stack frames,
+source code listing, and evaluation of arbitrary Python code in the
+context of any stack frame. It also supports post-mortem debugging
+and can be called under program control.
+
+The debugger is extensible – it is actually defined as the class
+"Pdb". This is currently undocumented but easily understood by reading
+the source. The extension interface uses the modules "bdb" and "cmd".
+
+See also:
+
+ Module "faulthandler"
+ Used to dump Python tracebacks explicitly, on a fault, after a
+ timeout, or on a user signal.
+
+ Module "traceback"
+ Standard interface to extract, format and print stack traces of
+ Python programs.
+
+The typical usage to break into the debugger is to insert:
+
+ import pdb; pdb.set_trace()
+
+Or:
+
+ breakpoint()
+
+at the location you want to break into the debugger, and then run the
+program. You can then step through the code following this statement,
+and continue running without the debugger using the "continue"
+command.
+
+Changed in version 3.7: The built-in "breakpoint()", when called with
+defaults, can be used instead of "import pdb; pdb.set_trace()".
+
+ def double(x):
+     breakpoint()
+     return x * 2
+ val = 3
+ print(f"{val} * 2 is {double(val)}")
+
+The debugger’s prompt is "(Pdb)", which is the indicator that you are
+in debug mode:
+
+ > ...(2)double()
+ -> breakpoint()
+ (Pdb) p x
+ 3
+ (Pdb) continue
+ 3 * 2 is 6
+
+Changed in version 3.3: Tab-completion via the "readline" module is
+available for commands and command arguments, e.g. the current global
+and local names are offered as arguments of the "p" command.
+
+You can also invoke "pdb" from the command line to debug other
+scripts. For example:
+
+ python -m pdb myscript.py
+
+When invoked as a module, pdb will automatically enter post-mortem
+debugging if the program being debugged exits abnormally. After post-
+mortem debugging (or after normal exit of the program), pdb will
+restart the program. 
Automatic restarting preserves pdb’s state (such
+as breakpoints) and in most cases is more useful than quitting the
+debugger upon program’s exit.
+
+Changed in version 3.2: Added the "-c" option to execute commands as
+if given in a ".pdbrc" file; see Debugger Commands.
+
+Changed in version 3.7: Added the "-m" option to execute modules
+similar to the way "python -m" does. As with a script, the debugger
+will pause execution just before the first line of the module.
+
+Typical usage to execute a statement under control of the debugger is:
+
+ >>> import pdb
+ >>> def f(x):
+ ...     print(1 / x)
+ >>> pdb.run("f(2)")
+ > <string>(1)<module>()
+ (Pdb) continue
+ 0.5
+ >>>
+
+The typical usage to inspect a crashed program is:
+
+ >>> import pdb
+ >>> def f(x):
+ ...     print(1 / x)
+ ...
+ >>> f(0)
+ Traceback (most recent call last):
+   File "<stdin>", line 1, in <module>
+   File "<stdin>", line 2, in f
+ ZeroDivisionError: division by zero
+ >>> pdb.pm()
+ > <stdin>(2)f()
+ (Pdb) p x
+ 0
+ (Pdb)
+
+Changed in version 3.13: The implementation of **PEP 667** means that
+name assignments made via "pdb" will immediately affect the active
+scope, even when running inside an *optimized scope*.
+
+The module defines the following functions; each enters the debugger
+in a slightly different way:
+
+pdb.run(statement, globals=None, locals=None)
+
+ Execute the *statement* (given as a string or a code object) under
+ debugger control. The debugger prompt appears before any code is
+ executed; you can set breakpoints and type "continue", or you can
+ step through the statement using "step" or "next" (all these
+ commands are explained below). The optional *globals* and *locals*
+ arguments specify the environment in which the code is executed; by
+ default the dictionary of the module "__main__" is used. (See the
+ explanation of the built-in "exec()" or "eval()" functions.)
+
+pdb.runeval(expression, globals=None, locals=None)
+
+ Evaluate the *expression* (given as a string or a code object)
+ under debugger control. When "runeval()" returns, it returns the
+ value of the *expression*. Otherwise this function is similar to
+ "run()".
+
+pdb.runcall(function, *args, **kwds)
+
+ Call the *function* (a function or method object, not a string)
+ with the given arguments. When "runcall()" returns, it returns
+ whatever the function call returned. The debugger prompt appears
+ as soon as the function is entered.
+
+pdb.set_trace(*, header=None, commands=None)
+
+ Enter the debugger at the calling stack frame. This is useful to
+ hard-code a breakpoint at a given point in a program, even if the
+ code is not otherwise being debugged (e.g. when an assertion
+ fails). If given, *header* is printed to the console just before
+ debugging begins. The *commands* argument, if given, is a list of
+ commands to execute when the debugger starts.
+
+ Changed in version 3.7: The keyword-only argument *header*.
+
+ Changed in version 3.13: "set_trace()" will enter the debugger
+ immediately, rather than on the next line of code to be executed.
+
+ Added in version 3.14: The *commands* argument.
+
+pdb.post_mortem(t=None)
+
+ Enter post-mortem debugging of the given exception or traceback
+ object. If no value is given, it uses the exception that is
+ currently being handled, or raises "ValueError" if there isn’t one.
+
+ Changed in version 3.13: Support for exception objects was added.
+
+pdb.pm()
+
+ Enter post-mortem debugging of the exception found in
+ "sys.last_exc". 
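+
+As a hedged illustration (not part of the reference text; the function
+name "divide" is a placeholder), a minimal use of "runcall()":
+
+   import pdb
+
+   def divide(a, b):
+       return a / b
+
+   # Run divide() under debugger control; the prompt appears as soon
+   # as the function is entered, and the function's return value is
+   # passed back to the caller of runcall().
+   result = pdb.runcall(divide, 6, 3)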
+ +The "run*" functions and "set_trace()" are aliases for instantiating +the "Pdb" class and calling the method of the same name. If you want +to access further features, you have to do this yourself: + +class pdb.Pdb(completekey='tab', stdin=None, stdout=None, skip=None, nosigint=False, readrc=True, mode=None) + + "Pdb" is the debugger class. + + The *completekey*, *stdin* and *stdout* arguments are passed to the + underlying "cmd.Cmd" class; see the description there. + + The *skip* argument, if given, must be an iterable of glob-style + module name patterns. The debugger will not step into frames that + originate in a module that matches one of these patterns. [1] + + By default, Pdb sets a handler for the SIGINT signal (which is sent + when the user presses "Ctrl"-"C" on the console) when you give a + "continue" command. This allows you to break into the debugger + again by pressing "Ctrl"-"C". If you want Pdb not to touch the + SIGINT handler, set *nosigint* to true. + + The *readrc* argument defaults to true and controls whether Pdb + will load .pdbrc files from the filesystem. + + The *mode* argument specifies how the debugger was invoked. It + impacts the workings of some debugger commands. Valid values are + "'inline'" (used by the breakpoint() builtin), "'cli'" (used by the + command line invocation) or "None" (for backwards compatible + behaviour, as before the *mode* argument was added). + + Example call to enable tracing with *skip*: + + import pdb; pdb.Pdb(skip=['django.*']).set_trace() + + Raises an auditing event "pdb.Pdb" with no arguments. + + Changed in version 3.1: Added the *skip* parameter. + + Changed in version 3.2: Added the *nosigint* parameter. Previously, + a SIGINT handler was never set by Pdb. + + Changed in version 3.6: The *readrc* argument. + + Added in version 3.14: Added the *mode* argument. + + run(statement, globals=None, locals=None) + runeval(expression, globals=None, locals=None) + runcall(function, *args, **kwds) + set_trace() + + See the documentation for the functions explained above. + + +Debugger Commands +================= + +The commands recognized by the debugger are listed below. Most +commands can be abbreviated to one or two letters as indicated; e.g. +"h(elp)" means that either "h" or "help" can be used to enter the help +command (but not "he" or "hel", nor "H" or "Help" or "HELP"). +Arguments to commands must be separated by whitespace (spaces or +tabs). Optional arguments are enclosed in square brackets ("[]") in +the command syntax; the square brackets must not be typed. +Alternatives in the command syntax are separated by a vertical bar +("|"). + +Entering a blank line repeats the last command entered. Exception: if +the last command was a "list" command, the next 11 lines are listed. + +Commands that the debugger doesn’t recognize are assumed to be Python +statements and are executed in the context of the program being +debugged. Python statements can also be prefixed with an exclamation +point ("!"). This is a powerful way to inspect the program being +debugged; it is even possible to change a variable or call a function. +When an exception occurs in such a statement, the exception name is +printed but the debugger’s state is not changed. + +Changed in version 3.13: Expressions/Statements whose prefix is a pdb +command are now correctly identified and executed. + +The debugger supports aliases. Aliases can have parameters which +allows one a certain level of adaptability to the context under +examination. 
+ +Multiple commands may be entered on a single line, separated by ";;". +(A single ";" is not used as it is the separator for multiple commands +in a line that is passed to the Python parser.) No intelligence is +applied to separating the commands; the input is split at the first +";;" pair, even if it is in the middle of a quoted string. A +workaround for strings with double semicolons is to use implicit +string concatenation "';'';'" or "";"";"". + +To set a temporary global variable, use a *convenience variable*. A +*convenience variable* is a variable whose name starts with "$". For +example, "$foo = 1" sets a global variable "$foo" which you can use in +the debugger session. The *convenience variables* are cleared when +the program resumes execution so it’s less likely to interfere with +your program compared to using normal variables like "foo = 1". + +There are three preset *convenience variables*: + +* "$_frame": the current frame you are debugging + +* "$_retval": the return value if the frame is returning + +* "$_exception": the exception if the frame is raising an exception + +Added in version 3.12: Added the *convenience variable* feature. + +If a file ".pdbrc" exists in the user’s home directory or in the +current directory, it is read with "'utf-8'" encoding and executed as +if it had been typed at the debugger prompt, with the exception that +empty lines and lines starting with "#" are ignored. This is +particularly useful for aliases. If both files exist, the one in the +home directory is read first and aliases defined there can be +overridden by the local file. + +Changed in version 3.2: ".pdbrc" can now contain commands that +continue debugging, such as "continue" or "next". Previously, these +commands had no effect. + +Changed in version 3.11: ".pdbrc" is now read with "'utf-8'" encoding. +Previously, it was read with the system locale encoding. + +h(elp) [command] + + Without argument, print the list of available commands. With a + *command* as argument, print help about that command. "help pdb" + displays the full documentation (the docstring of the "pdb" + module). Since the *command* argument must be an identifier, "help + exec" must be entered to get help on the "!" command. + +w(here) [count] + + Print a stack trace, with the most recent frame at the bottom. if + *count* is 0, print the current frame entry. If *count* is + negative, print the least recent - *count* frames. If *count* is + positive, print the most recent *count* frames. An arrow (">") + indicates the current frame, which determines the context of most + commands. + + Changed in version 3.14: *count* argument is added. + +d(own) [count] + + Move the current frame *count* (default one) levels down in the + stack trace (to a newer frame). + +u(p) [count] + + Move the current frame *count* (default one) levels up in the stack + trace (to an older frame). + +b(reak) [([filename:]lineno | function) [, condition]] + + With a *lineno* argument, set a break at line *lineno* in the + current file. The line number may be prefixed with a *filename* and + a colon, to specify a breakpoint in another file (possibly one that + hasn’t been loaded yet). The file is searched on "sys.path". + Acceptable forms of *filename* are "/abspath/to/file.py", + "relpath/file.py", "module" and "package.module". + + With a *function* argument, set a break at the first executable + statement within that function. *function* can be any expression + that evaluates to a function in the current namespace. 
+ + If a second argument is present, it is an expression which must + evaluate to true before the breakpoint is honored. + + Without argument, list all breaks, including for each breakpoint, + the number of times that breakpoint has been hit, the current + ignore count, and the associated condition if any. + + Each breakpoint is assigned a number to which all the other + breakpoint commands refer. + +tbreak [([filename:]lineno | function) [, condition]] + + Temporary breakpoint, which is removed automatically when it is + first hit. The arguments are the same as for "break". + +cl(ear) [filename:lineno | bpnumber ...] + + With a *filename:lineno* argument, clear all the breakpoints at + this line. With a space separated list of breakpoint numbers, clear + those breakpoints. Without argument, clear all breaks (but first + ask confirmation). + +disable bpnumber [bpnumber ...] + + Disable the breakpoints given as a space separated list of + breakpoint numbers. Disabling a breakpoint means it cannot cause + the program to stop execution, but unlike clearing a breakpoint, it + remains in the list of breakpoints and can be (re-)enabled. + +enable bpnumber [bpnumber ...] + + Enable the breakpoints specified. + +ignore bpnumber [count] + + Set the ignore count for the given breakpoint number. If *count* + is omitted, the ignore count is set to 0. A breakpoint becomes + active when the ignore count is zero. When non-zero, the *count* + is decremented each time the breakpoint is reached and the + breakpoint is not disabled and any associated condition evaluates + to true. + +condition bpnumber [condition] + + Set a new *condition* for the breakpoint, an expression which must + evaluate to true before the breakpoint is honored. If *condition* + is absent, any existing condition is removed; i.e., the breakpoint + is made unconditional. + +commands [bpnumber] + + Specify a list of commands for breakpoint number *bpnumber*. The + commands themselves appear on the following lines. Type a line + containing just "end" to terminate the commands. An example: + + (Pdb) commands 1 + (com) p some_variable + (com) end + (Pdb) + + To remove all commands from a breakpoint, type "commands" and + follow it immediately with "end"; that is, give no commands. + + With no *bpnumber* argument, "commands" refers to the last + breakpoint set. + + You can use breakpoint commands to start your program up again. + Simply use the "continue" command, or "step", or any other command + that resumes execution. + + Specifying any command resuming execution (currently "continue", + "step", "next", "return", "until", "jump", "quit" and their + abbreviations) terminates the command list (as if that command was + immediately followed by end). This is because any time you resume + execution (even with a simple next or step), you may encounter + another breakpoint—which could have its own command list, leading + to ambiguities about which list to execute. + + If the list of commands contains the "silent" command, or a command + that resumes execution, then the breakpoint message containing + information about the frame is not displayed. + + Changed in version 3.14: Frame information will not be displayed if + a command that resumes execution is present in the command list. + +s(tep) + + Execute the current line, stop at the first possible occasion + (either in a function that is called or on the next line in the + current function). + +n(ext) + + Continue execution until the next line in the current function is + reached or it returns. 
(The difference between "next" and "step" + is that "step" stops inside a called function, while "next" + executes called functions at (nearly) full speed, only stopping at + the next line in the current function.) + +unt(il) [lineno] + + Without argument, continue execution until the line with a number + greater than the current one is reached. + + With *lineno*, continue execution until a line with a number + greater or equal to *lineno* is reached. In both cases, also stop + when the current frame returns. + + Changed in version 3.2: Allow giving an explicit line number. + +r(eturn) + + Continue execution until the current function returns. + +c(ont(inue)) + + Continue execution, only stop when a breakpoint is encountered. + +j(ump) lineno + + Set the next line that will be executed. Only available in the + bottom-most frame. This lets you jump back and execute code again, + or jump forward to skip code that you don’t want to run. + + It should be noted that not all jumps are allowed – for instance it + is not possible to jump into the middle of a "for" loop or out of a + "finally" clause. + +l(ist) [first[, last]] + + List source code for the current file. Without arguments, list 11 + lines around the current line or continue the previous listing. + With "." as argument, list 11 lines around the current line. With + one argument, list 11 lines around at that line. With two + arguments, list the given range; if the second argument is less + than the first, it is interpreted as a count. + + The current line in the current frame is indicated by "->". If an + exception is being debugged, the line where the exception was + originally raised or propagated is indicated by ">>", if it differs + from the current line. + + Changed in version 3.2: Added the ">>" marker. + +ll | longlist + + List all source code for the current function or frame. + Interesting lines are marked as for "list". + + Added in version 3.2. + +a(rgs) + + Print the arguments of the current function and their current + values. + +p expression + + Evaluate *expression* in the current context and print its value. + + Note: + + "print()" can also be used, but is not a debugger command — this + executes the Python "print()" function. + +pp expression + + Like the "p" command, except the value of *expression* is pretty- + printed using the "pprint" module. + +whatis expression + + Print the type of *expression*. + +source expression + + Try to get source code of *expression* and display it. + + Added in version 3.2. + +display [expression] + + Display the value of *expression* if it changed, each time + execution stops in the current frame. + + Without *expression*, list all display expressions for the current + frame. + + Note: + + Display evaluates *expression* and compares to the result of the + previous evaluation of *expression*, so when the result is + mutable, display may not be able to pick up the changes. 
+ + Example: + + lst = [] + breakpoint() + pass + lst.append(1) + print(lst) + + Display won’t realize "lst" has been changed because the result of + evaluation is modified in place by "lst.append(1)" before being + compared: + + > example.py(3)() + -> pass + (Pdb) display lst + display lst: [] + (Pdb) n + > example.py(4)() + -> lst.append(1) + (Pdb) n + > example.py(5)() + -> print(lst) + (Pdb) + + You can do some tricks with copy mechanism to make it work: + + > example.py(3)() + -> pass + (Pdb) display lst[:] + display lst[:]: [] + (Pdb) n + > example.py(4)() + -> lst.append(1) + (Pdb) n + > example.py(5)() + -> print(lst) + display lst[:]: [1] [old: []] + (Pdb) + + Added in version 3.2. + +undisplay [expression] + + Do not display *expression* anymore in the current frame. Without + *expression*, clear all display expressions for the current frame. + + Added in version 3.2. + +interact + + Start an interactive interpreter (using the "code" module) in a new + global namespace initialised from the local and global namespaces + for the current scope. Use "exit()" or "quit()" to exit the + interpreter and return to the debugger. + + Note: + + As "interact" creates a new dedicated namespace for code + execution, assignments to variables will not affect the original + namespaces. However, modifications to any referenced mutable + objects will be reflected in the original namespaces as usual. + + Added in version 3.2. + + Changed in version 3.13: "exit()" and "quit()" can be used to exit + the "interact" command. + + Changed in version 3.13: "interact" directs its output to the + debugger’s output channel rather than "sys.stderr". + +alias [name [command]] + + Create an alias called *name* that executes *command*. The + *command* must *not* be enclosed in quotes. Replaceable parameters + can be indicated by "%1", "%2", … and "%9", while "%*" is replaced + by all the parameters. If *command* is omitted, the current alias + for *name* is shown. If no arguments are given, all aliases are + listed. + + Aliases may be nested and can contain anything that can be legally + typed at the pdb prompt. Note that internal pdb commands *can* be + overridden by aliases. Such a command is then hidden until the + alias is removed. Aliasing is recursively applied to the first + word of the command line; all other words in the line are left + alone. + + As an example, here are two useful aliases (especially when placed + in the ".pdbrc" file): + + # Print instance variables (usage "pi classInst") + alias pi for k in %1.__dict__.keys(): print(f"%1.{k} = {%1.__dict__[k]}") + # Print instance variables in self + alias ps pi self + +unalias name + + Delete the specified alias *name*. + +! statement + + Execute the (one-line) *statement* in the context of the current + stack frame. The exclamation point can be omitted unless the first + word of the statement resembles a debugger command, e.g.: + + (Pdb) ! n=42 + (Pdb) + + To set a global variable, you can prefix the assignment command + with a "global" statement on the same line, e.g.: + + (Pdb) global list_options; list_options = ['-l'] + (Pdb) + +run [args ...] +restart [args ...] + + Restart the debugged Python program. If *args* is supplied, it is + split with "shlex" and the result is used as the new "sys.argv". + History, breakpoints, actions and debugger options are preserved. + "restart" is an alias for "run". + + Changed in version 3.14: "run" and "restart" commands are disabled + when the debugger is invoked in "'inline'" mode. 
+ +q(uit) + + Quit from the debugger. The program being executed is aborted. + +debug code + + Enter a recursive debugger that steps through *code* (which is an + arbitrary expression or statement to be executed in the current + environment). + +retval + + Print the return value for the last return of the current function. + +exceptions [excnumber] + + List or jump between chained exceptions. + + When using "pdb.pm()" or "Pdb.post_mortem(...)" with a chained + exception instead of a traceback, it allows the user to move + between the chained exceptions using "exceptions" command to list + exceptions, and "exception " to switch to that exception. + + Example: + + def out(): + try: + middle() + except Exception as e: + raise ValueError("reraise middle() error") from e + + def middle(): + try: + return inner(0) + except Exception as e: + raise ValueError("Middle fail") + + def inner(x): + 1 / x + + out() + + calling "pdb.pm()" will allow to move between exceptions: + + > example.py(5)out() + -> raise ValueError("reraise middle() error") from e + + (Pdb) exceptions + 0 ZeroDivisionError('division by zero') + 1 ValueError('Middle fail') + > 2 ValueError('reraise middle() error') + + (Pdb) exceptions 0 + > example.py(16)inner() + -> 1 / x + + (Pdb) up + > example.py(10)middle() + -> return inner(0) + + Added in version 3.13. + +-[ Footnotes ]- + +[1] Whether a frame is considered to originate in a certain module is + determined by the "__name__" in the frame globals. +''', + 'del': r'''The "del" statement +******************* + + del_stmt ::= "del" target_list + +Deletion is recursively defined very similar to the way assignment is +defined. Rather than spelling it out in full details, here are some +hints. + +Deletion of a target list recursively deletes each target, from left +to right. + +Deletion of a name removes the binding of that name from the local or +global namespace, depending on whether the name occurs in a "global" +statement in the same code block. If the name is unbound, a +"NameError" exception will be raised. + +Deletion of attribute references, subscriptions and slicings is passed +to the primary object involved; deletion of a slicing is in general +equivalent to assignment of an empty slice of the right type (but even +this is determined by the sliced object). + +Changed in version 3.2: Previously it was illegal to delete a name +from the local namespace if it occurs as a free variable in a nested +block. +''', + 'dict': r'''Dictionary displays +******************* + +A dictionary display is a possibly empty series of dict items +(key/value pairs) enclosed in curly braces: + + dict_display ::= "{" [dict_item_list | dict_comprehension] "}" + dict_item_list ::= dict_item ("," dict_item)* [","] + dict_item ::= expression ":" expression | "**" or_expr + dict_comprehension ::= expression ":" expression comp_for + +A dictionary display yields a new dictionary object. + +If a comma-separated sequence of dict items is given, they are +evaluated from left to right to define the entries of the dictionary: +each key object is used as a key into the dictionary to store the +corresponding value. This means that you can specify the same key +multiple times in the dict item list, and the final dictionary’s value +for that key will be the last one given. + +A double asterisk "**" denotes *dictionary unpacking*. Its operand +must be a *mapping*. Each mapping item is added to the new +dictionary. Later values replace values already set by earlier dict +items and earlier dictionary unpackings. 
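+
+For example (an illustrative sketch, not part of the original text),
+how later items and unpackings interact:
+
+   base = {"a": 1, "b": 2}
+   d = {"a": 0, **base, "b": 99}
+   # The unpacking of "base" overrides the earlier "a": 0, and the
+   # later explicit item "b": 99 overrides the value unpacked from
+   # "base".
+   print(d)   # {'a': 1, 'b': 99}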
+ +Added in version 3.5: Unpacking into dictionary displays, originally +proposed by **PEP 448**. + +A dict comprehension, in contrast to list and set comprehensions, +needs two expressions separated with a colon followed by the usual +“for†and “if†clauses. When the comprehension is run, the resulting +key and value elements are inserted in the new dictionary in the order +they are produced. + +Restrictions on the types of the key values are listed earlier in +section The standard type hierarchy. (To summarize, the key type +should be *hashable*, which excludes all mutable objects.) Clashes +between duplicate keys are not detected; the last value (textually +rightmost in the display) stored for a given key value prevails. + +Changed in version 3.8: Prior to Python 3.8, in dict comprehensions, +the evaluation order of key and value was not well-defined. In +CPython, the value was evaluated before the key. Starting with 3.8, +the key is evaluated before the value, as proposed by **PEP 572**. +''', + 'dynamic-features': r'''Interaction with dynamic features +********************************* + +Name resolution of free variables occurs at runtime, not at compile +time. This means that the following code will print 42: + + i = 10 + def f(): + print(i) + i = 42 + f() + +The "eval()" and "exec()" functions do not have access to the full +environment for resolving names. Names may be resolved in the local +and global namespaces of the caller. Free variables are not resolved +in the nearest enclosing namespace, but in the global namespace. [1] +The "exec()" and "eval()" functions have optional arguments to +override the global and local namespace. If only one namespace is +specified, it is used for both. +''', + 'else': r'''The "if" statement +****************** + +The "if" statement is used for conditional execution: + + if_stmt ::= "if" assignment_expression ":" suite + ("elif" assignment_expression ":" suite)* + ["else" ":" suite] + +It selects exactly one of the suites by evaluating the expressions one +by one until one is found to be true (see section Boolean operations +for the definition of true and false); then that suite is executed +(and no other part of the "if" statement is executed or evaluated). +If all expressions are false, the suite of the "else" clause, if +present, is executed. +''', + 'exceptions': r'''Exceptions +********** + +Exceptions are a means of breaking out of the normal flow of control +of a code block in order to handle errors or other exceptional +conditions. An exception is *raised* at the point where the error is +detected; it may be *handled* by the surrounding code block or by any +code block that directly or indirectly invoked the code block where +the error occurred. + +The Python interpreter raises an exception when it detects a run-time +error (such as division by zero). A Python program can also +explicitly raise an exception with the "raise" statement. Exception +handlers are specified with the "try" … "except" statement. The +"finally" clause of such a statement can be used to specify cleanup +code which does not handle the exception, but is executed whether an +exception occurred or not in the preceding code. + +Python uses the “termination†model of error handling: an exception +handler can find out what happened and continue execution at an outer +level, but it cannot repair the cause of the error and retry the +failing operation (except by re-entering the offending piece of code +from the top). 
+ +When an exception is not handled at all, the interpreter terminates +execution of the program, or returns to its interactive main loop. In +either case, it prints a stack traceback, except when the exception is +"SystemExit". + +Exceptions are identified by class instances. The "except" clause is +selected depending on the class of the instance: it must reference the +class of the instance or a *non-virtual base class* thereof. The +instance can be received by the handler and can carry additional +information about the exceptional condition. + +Note: + + Exception messages are not part of the Python API. Their contents + may change from one version of Python to the next without warning + and should not be relied on by code which will run under multiple + versions of the interpreter. + +See also the description of the "try" statement in section The try +statement and "raise" statement in section The raise statement. + +-[ Footnotes ]- + +[1] This limitation occurs because the code that is executed by these + operations is not available at the time the module is compiled. +''', + 'execmodel': r'''Execution model +*************** + + +Structure of a program +====================== + +A Python program is constructed from code blocks. A *block* is a piece +of Python program text that is executed as a unit. The following are +blocks: a module, a function body, and a class definition. Each +command typed interactively is a block. A script file (a file given +as standard input to the interpreter or specified as a command line +argument to the interpreter) is a code block. A script command (a +command specified on the interpreter command line with the "-c" +option) is a code block. A module run as a top level script (as module +"__main__") from the command line using a "-m" argument is also a code +block. The string argument passed to the built-in functions "eval()" +and "exec()" is a code block. + +A code block is executed in an *execution frame*. A frame contains +some administrative information (used for debugging) and determines +where and how execution continues after the code block’s execution has +completed. + + +Naming and binding +================== + + +Binding of names +---------------- + +*Names* refer to objects. Names are introduced by name binding +operations. + +The following constructs bind names: + +* formal parameters to functions, + +* class definitions, + +* function definitions, + +* assignment expressions, + +* targets that are identifiers if occurring in an assignment: + + * "for" loop header, + + * after "as" in a "with" statement, "except" clause, "except*" + clause, or in the as-pattern in structural pattern matching, + + * in a capture pattern in structural pattern matching + +* "import" statements. + +* "type" statements. + +* type parameter lists. + +The "import" statement of the form "from ... import *" binds all names +defined in the imported module, except those beginning with an +underscore. This form may only be used at the module level. + +A target occurring in a "del" statement is also considered bound for +this purpose (though the actual semantics are to unbind the name). + +Each assignment or import statement occurs within a block defined by a +class or function definition or at the module level (the top-level +code block). + +If a name is bound in a block, it is a local variable of that block, +unless declared as "nonlocal" or "global". If a name is bound at the +module level, it is a global variable. 
(The variables of the module +code block are local and global.) If a variable is used in a code +block but not defined there, it is a *free variable*. + +Each occurrence of a name in the program text refers to the *binding* +of that name established by the following name resolution rules. + + +Resolution of names +------------------- + +A *scope* defines the visibility of a name within a block. If a local +variable is defined in a block, its scope includes that block. If the +definition occurs in a function block, the scope extends to any blocks +contained within the defining one, unless a contained block introduces +a different binding for the name. + +When a name is used in a code block, it is resolved using the nearest +enclosing scope. The set of all such scopes visible to a code block +is called the block’s *environment*. + +When a name is not found at all, a "NameError" exception is raised. If +the current scope is a function scope, and the name refers to a local +variable that has not yet been bound to a value at the point where the +name is used, an "UnboundLocalError" exception is raised. +"UnboundLocalError" is a subclass of "NameError". + +If a name binding operation occurs anywhere within a code block, all +uses of the name within the block are treated as references to the +current block. This can lead to errors when a name is used within a +block before it is bound. This rule is subtle. Python lacks +declarations and allows name binding operations to occur anywhere +within a code block. The local variables of a code block can be +determined by scanning the entire text of the block for name binding +operations. See the FAQ entry on UnboundLocalError for examples. + +If the "global" statement occurs within a block, all uses of the names +specified in the statement refer to the bindings of those names in the +top-level namespace. Names are resolved in the top-level namespace by +searching the global namespace, i.e. the namespace of the module +containing the code block, and the builtins namespace, the namespace +of the module "builtins". The global namespace is searched first. If +the names are not found there, the builtins namespace is searched +next. If the names are also not found in the builtins namespace, new +variables are created in the global namespace. The global statement +must precede all uses of the listed names. + +The "global" statement has the same scope as a name binding operation +in the same block. If the nearest enclosing scope for a free variable +contains a global statement, the free variable is treated as a global. + +The "nonlocal" statement causes corresponding names to refer to +previously bound variables in the nearest enclosing function scope. +"SyntaxError" is raised at compile time if the given name does not +exist in any enclosing function scope. Type parameters cannot be +rebound with the "nonlocal" statement. + +The namespace for a module is automatically created the first time a +module is imported. The main module for a script is always called +"__main__". + +Class definition blocks and arguments to "exec()" and "eval()" are +special in the context of name resolution. A class definition is an +executable statement that may use and define names. These references +follow the normal rules for name resolution with an exception that +unbound local variables are looked up in the global namespace. The +namespace of the class definition becomes the attribute dictionary of +the class. 
The scope of names defined in a class block is limited to +the class block; it does not extend to the code blocks of methods. +This includes comprehensions and generator expressions, but it does +not include annotation scopes, which have access to their enclosing +class scopes. This means that the following will fail: + + class A: + a = 42 + b = list(a + i for i in range(10)) + +However, the following will succeed: + + class A: + type Alias = Nested + class Nested: pass + + print(A.Alias.__value__) # + + +Annotation scopes +----------------- + +*Annotations*, type parameter lists and "type" statements introduce +*annotation scopes*, which behave mostly like function scopes, but +with some exceptions discussed below. + +Annotation scopes are used in the following contexts: + +* *Function annotations*. + +* *Variable annotations*. + +* Type parameter lists for generic type aliases. + +* Type parameter lists for generic functions. A generic function’s + annotations are executed within the annotation scope, but its + defaults and decorators are not. + +* Type parameter lists for generic classes. A generic class’s base + classes and keyword arguments are executed within the annotation + scope, but its decorators are not. + +* The bounds, constraints, and default values for type parameters + (lazily evaluated). + +* The value of type aliases (lazily evaluated). + +Annotation scopes differ from function scopes in the following ways: + +* Annotation scopes have access to their enclosing class namespace. If + an annotation scope is immediately within a class scope, or within + another annotation scope that is immediately within a class scope, + the code in the annotation scope can use names defined in the class + scope as if it were executed directly within the class body. This + contrasts with regular functions defined within classes, which + cannot access names defined in the class scope. + +* Expressions in annotation scopes cannot contain "yield", "yield + from", "await", or ":=" expressions. (These expressions are allowed + in other scopes contained within the annotation scope.) + +* Names defined in annotation scopes cannot be rebound with "nonlocal" + statements in inner scopes. This includes only type parameters, as + no other syntactic elements that can appear within annotation scopes + can introduce new names. + +* While annotation scopes have an internal name, that name is not + reflected in the *qualified name* of objects defined within the + scope. Instead, the "__qualname__" of such objects is as if the + object were defined in the enclosing scope. + +Added in version 3.12: Annotation scopes were introduced in Python +3.12 as part of **PEP 695**. + +Changed in version 3.13: Annotation scopes are also used for type +parameter defaults, as introduced by **PEP 696**. + +Changed in version 3.14: Annotation scopes are now also used for +annotations, as specified in **PEP 649** and **PEP 749**. + + +Lazy evaluation +--------------- + +Most annotation scopes are *lazily evaluated*. This includes +annotations, the values of type aliases created through the "type" +statement, and the bounds, constraints, and default values of type +variables created through the type parameter syntax. This means that +they are not evaluated when the type alias or type variable is +created, or when the object carrying annotations is created. Instead, +they are only evaluated when necessary, for example when the +"__value__" attribute on a type alias is accessed. 
+ +Example: + + >>> type Alias = 1/0 + >>> Alias.__value__ + Traceback (most recent call last): + ... + ZeroDivisionError: division by zero + >>> def func[T: 1/0](): pass + >>> T = func.__type_params__[0] + >>> T.__bound__ + Traceback (most recent call last): + ... + ZeroDivisionError: division by zero + +Here the exception is raised only when the "__value__" attribute of +the type alias or the "__bound__" attribute of the type variable is +accessed. + +This behavior is primarily useful for references to types that have +not yet been defined when the type alias or type variable is created. +For example, lazy evaluation enables creation of mutually recursive +type aliases: + + from typing import Literal + + type SimpleExpr = int | Parenthesized + type Parenthesized = tuple[Literal["("], Expr, Literal[")"]] + type Expr = SimpleExpr | tuple[SimpleExpr, Literal["+", "-"], Expr] + +Lazily evaluated values are evaluated in annotation scope, which means +that names that appear inside the lazily evaluated value are looked up +as if they were used in the immediately enclosing scope. + +Added in version 3.12. + + +Builtins and restricted execution +--------------------------------- + +**CPython implementation detail:** Users should not touch +"__builtins__"; it is strictly an implementation detail. Users +wanting to override values in the builtins namespace should "import" +the "builtins" module and modify its attributes appropriately. + +The builtins namespace associated with the execution of a code block +is actually found by looking up the name "__builtins__" in its global +namespace; this should be a dictionary or a module (in the latter case +the module’s dictionary is used). By default, when in the "__main__" +module, "__builtins__" is the built-in module "builtins"; when in any +other module, "__builtins__" is an alias for the dictionary of the +"builtins" module itself. + + +Interaction with dynamic features +--------------------------------- + +Name resolution of free variables occurs at runtime, not at compile +time. This means that the following code will print 42: + + i = 10 + def f(): + print(i) + i = 42 + f() + +The "eval()" and "exec()" functions do not have access to the full +environment for resolving names. Names may be resolved in the local +and global namespaces of the caller. Free variables are not resolved +in the nearest enclosing namespace, but in the global namespace. [1] +The "exec()" and "eval()" functions have optional arguments to +override the global and local namespace. If only one namespace is +specified, it is used for both. + + +Exceptions +========== + +Exceptions are a means of breaking out of the normal flow of control +of a code block in order to handle errors or other exceptional +conditions. An exception is *raised* at the point where the error is +detected; it may be *handled* by the surrounding code block or by any +code block that directly or indirectly invoked the code block where +the error occurred. + +The Python interpreter raises an exception when it detects a run-time +error (such as division by zero). A Python program can also +explicitly raise an exception with the "raise" statement. Exception +handlers are specified with the "try" … "except" statement. The +"finally" clause of such a statement can be used to specify cleanup +code which does not handle the exception, but is executed whether an +exception occurred or not in the preceding code. 
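+
+As a brief, non-normative illustration of these constructs, a handler
+with a "finally" clause might look like this:
+
+   def read_int(text):
+       try:
+           return int(text)
+       except ValueError:
+           # handle the error raised by int() for invalid input
+           return None
+       finally:
+           # cleanup code; runs whether or not an exception occurred
+           print("finished parsing", text)
+
+Here "read_int('10')" returns "10", while "read_int('spam')" returns
+"None"; the "finally" suite runs in both cases.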
+ +Python uses the “termination†model of error handling: an exception +handler can find out what happened and continue execution at an outer +level, but it cannot repair the cause of the error and retry the +failing operation (except by re-entering the offending piece of code +from the top). + +When an exception is not handled at all, the interpreter terminates +execution of the program, or returns to its interactive main loop. In +either case, it prints a stack traceback, except when the exception is +"SystemExit". + +Exceptions are identified by class instances. The "except" clause is +selected depending on the class of the instance: it must reference the +class of the instance or a *non-virtual base class* thereof. The +instance can be received by the handler and can carry additional +information about the exceptional condition. + +Note: + + Exception messages are not part of the Python API. Their contents + may change from one version of Python to the next without warning + and should not be relied on by code which will run under multiple + versions of the interpreter. + +See also the description of the "try" statement in section The try +statement and "raise" statement in section The raise statement. + +-[ Footnotes ]- + +[1] This limitation occurs because the code that is executed by these + operations is not available at the time the module is compiled. +''', + 'exprlists': r'''Expression lists +**************** + + starred_expression ::= ["*"] or_expr + flexible_expression ::= assignment_expression | starred_expression + flexible_expression_list ::= flexible_expression ("," flexible_expression)* [","] + starred_expression_list ::= starred_expression ("," starred_expression)* [","] + expression_list ::= expression ("," expression)* [","] + yield_list ::= expression_list | starred_expression "," [starred_expression_list] + +Except when part of a list or set display, an expression list +containing at least one comma yields a tuple. The length of the tuple +is the number of expressions in the list. The expressions are +evaluated from left to right. + +An asterisk "*" denotes *iterable unpacking*. Its operand must be an +*iterable*. The iterable is expanded into a sequence of items, which +are included in the new tuple, list, or set, at the site of the +unpacking. + +Added in version 3.5: Iterable unpacking in expression lists, +originally proposed by **PEP 448**. + +Added in version 3.11: Any item in an expression list may be starred. +See **PEP 646**. + +A trailing comma is required only to create a one-item tuple, such as +"1,"; it is optional in all other cases. A single expression without a +trailing comma doesn’t create a tuple, but rather yields the value of +that expression. (To create an empty tuple, use an empty pair of +parentheses: "()".) +''', + 'floating': r'''Floating-point literals +*********************** + +Floating-point literals are described by the following lexical +definitions: + + floatnumber ::= pointfloat | exponentfloat + pointfloat ::= [digitpart] fraction | digitpart "." + exponentfloat ::= (digitpart | pointfloat) exponent + digitpart ::= digit (["_"] digit)* + fraction ::= "." digitpart + exponent ::= ("e" | "E") ["+" | "-"] digitpart + +Note that the integer and exponent parts are always interpreted using +radix 10. For example, "077e010" is legal, and denotes the same number +as "77e10". The allowed range of floating-point literals is +implementation-dependent. As in integer literals, underscores are +supported for digit grouping. 
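+
+For instance (an illustrative interactive session, not part of the
+lexical grammar above):
+
+   >>> 077e010 == 77e10
+   True
+   >>> 3.14_15_93
+   3.141593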
+ +Some examples of floating-point literals: + + 3.14 10. .001 1e100 3.14e-10 0e0 3.14_15_93 + +Changed in version 3.6: Underscores are now allowed for grouping +purposes in literals. +''', + 'for': r'''The "for" statement +******************* + +The "for" statement is used to iterate over the elements of a sequence +(such as a string, tuple or list) or other iterable object: + + for_stmt ::= "for" target_list "in" starred_list ":" suite + ["else" ":" suite] + +The "starred_list" expression is evaluated once; it should yield an +*iterable* object. An *iterator* is created for that iterable. The +first item provided by the iterator is then assigned to the target +list using the standard rules for assignments (see Assignment +statements), and the suite is executed. This repeats for each item +provided by the iterator. When the iterator is exhausted, the suite +in the "else" clause, if present, is executed, and the loop +terminates. + +A "break" statement executed in the first suite terminates the loop +without executing the "else" clause’s suite. A "continue" statement +executed in the first suite skips the rest of the suite and continues +with the next item, or with the "else" clause if there is no next +item. + +The for-loop makes assignments to the variables in the target list. +This overwrites all previous assignments to those variables including +those made in the suite of the for-loop: + + for i in range(10): + print(i) + i = 5 # this will not affect the for-loop + # because i will be overwritten with the next + # index in the range + +Names in the target list are not deleted when the loop is finished, +but if the sequence is empty, they will not have been assigned to at +all by the loop. Hint: the built-in type "range()" represents +immutable arithmetic sequences of integers. For instance, iterating +"range(3)" successively yields 0, 1, and then 2. + +Changed in version 3.11: Starred elements are now allowed in the +expression list. +''', + 'formatstrings': r'''Format String Syntax +******************** + +The "str.format()" method and the "Formatter" class share the same +syntax for format strings (although in the case of "Formatter", +subclasses can define their own format string syntax). The syntax is +related to that of formatted string literals, but it is less +sophisticated and, in particular, does not support arbitrary +expressions. + +Format strings contain “replacement fields†surrounded by curly braces +"{}". Anything that is not contained in braces is considered literal +text, which is copied unchanged to the output. If you need to include +a brace character in the literal text, it can be escaped by doubling: +"{{" and "}}". + +The grammar for a replacement field is as follows: + + replacement_field ::= "{" [field_name] ["!" conversion] [":" format_spec] "}" + field_name ::= arg_name ("." attribute_name | "[" element_index "]")* + arg_name ::= [identifier | digit+] + attribute_name ::= identifier + element_index ::= digit+ | index_string + index_string ::= + + conversion ::= "r" | "s" | "a" + format_spec ::= format-spec:format_spec + +In less formal terms, the replacement field can start with a +*field_name* that specifies the object whose value is to be formatted +and inserted into the output instead of the replacement field. The +*field_name* is optionally followed by a *conversion* field, which is +preceded by an exclamation point "'!'", and a *format_spec*, which is +preceded by a colon "':'". These specify a non-default format for the +replacement value. 
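+
+As a small, non-normative illustration combining a field name, a
+conversion and a format specification:
+
+   >>> "{0!r:>12}".format("spam")
+   "      'spam'"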
+ +See also the Format Specification Mini-Language section. + +The *field_name* itself begins with an *arg_name* that is either a +number or a keyword. If it’s a number, it refers to a positional +argument, and if it’s a keyword, it refers to a named keyword +argument. An *arg_name* is treated as a number if a call to +"str.isdecimal()" on the string would return true. If the numerical +arg_names in a format string are 0, 1, 2, … in sequence, they can all +be omitted (not just some) and the numbers 0, 1, 2, … will be +automatically inserted in that order. Because *arg_name* is not quote- +delimited, it is not possible to specify arbitrary dictionary keys +(e.g., the strings "'10'" or "':-]'") within a format string. The +*arg_name* can be followed by any number of index or attribute +expressions. An expression of the form "'.name'" selects the named +attribute using "getattr()", while an expression of the form +"'[index]'" does an index lookup using "__getitem__()". + +Changed in version 3.1: The positional argument specifiers can be +omitted for "str.format()", so "'{} {}'.format(a, b)" is equivalent to +"'{0} {1}'.format(a, b)". + +Changed in version 3.4: The positional argument specifiers can be +omitted for "Formatter". + +Some simple format string examples: + + "First, thou shalt count to {0}" # References first positional argument + "Bring me a {}" # Implicitly references the first positional argument + "From {} to {}" # Same as "From {0} to {1}" + "My quest is {name}" # References keyword argument 'name' + "Weight in tons {0.weight}" # 'weight' attribute of first positional arg + "Units destroyed: {players[0]}" # First element of keyword argument 'players'. + +The *conversion* field causes a type coercion before formatting. +Normally, the job of formatting a value is done by the "__format__()" +method of the value itself. However, in some cases it is desirable to +force a type to be formatted as a string, overriding its own +definition of formatting. By converting the value to a string before +calling "__format__()", the normal formatting logic is bypassed. + +Three conversion flags are currently supported: "'!s'" which calls +"str()" on the value, "'!r'" which calls "repr()" and "'!a'" which +calls "ascii()". + +Some examples: + + "Harold's a clever {0!s}" # Calls str() on the argument first + "Bring out the holy {name!r}" # Calls repr() on the argument first + "More {!a}" # Calls ascii() on the argument first + +The *format_spec* field contains a specification of how the value +should be presented, including such details as field width, alignment, +padding, decimal precision and so on. Each value type can define its +own “formatting mini-language†or interpretation of the *format_spec*. + +Most built-in types support a common formatting mini-language, which +is described in the next section. + +A *format_spec* field can also include nested replacement fields +within it. These nested replacement fields may contain a field name, +conversion flag and format specification, but deeper nesting is not +allowed. The replacement fields within the format_spec are +substituted before the *format_spec* string is interpreted. This +allows the formatting of a value to be dynamically specified. + +See the Format examples section for some examples. 
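+
+For example, a field width and precision can be supplied at run time
+through nested replacement fields (an illustrative sketch):
+
+   >>> '{:{width}.{prec}f}'.format(3.14159, width=10, prec=2)
+   '      3.14'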
+ + +Format Specification Mini-Language +================================== + +“Format specifications†are used within replacement fields contained +within a format string to define how individual values are presented +(see Format String Syntax and f-strings). They can also be passed +directly to the built-in "format()" function. Each formattable type +may define how the format specification is to be interpreted. + +Most built-in types implement the following options for format +specifications, although some of the formatting options are only +supported by the numeric types. + +A general convention is that an empty format specification produces +the same result as if you had called "str()" on the value. A non-empty +format specification typically modifies the result. + +The general form of a *standard format specifier* is: + + format_spec ::= [[fill]align][sign]["z"]["#"]["0"][width][grouping_option]["." precision][type] + fill ::= + align ::= "<" | ">" | "=" | "^" + sign ::= "+" | "-" | " " + width ::= digit+ + grouping_option ::= "_" | "," + precision ::= digit+ + type ::= "b" | "c" | "d" | "e" | "E" | "f" | "F" | "g" | "G" | "n" | "o" | "s" | "x" | "X" | "%" + +If a valid *align* value is specified, it can be preceded by a *fill* +character that can be any character and defaults to a space if +omitted. It is not possible to use a literal curly brace (â€"{"†or +“"}"â€) as the *fill* character in a formatted string literal or when +using the "str.format()" method. However, it is possible to insert a +curly brace with a nested replacement field. This limitation doesn’t +affect the "format()" function. + +The meaning of the various alignment options is as follows: + ++-----------+------------------------------------------------------------+ +| Option | Meaning | +|===========|============================================================| +| "'<'" | Forces the field to be left-aligned within the available | +| | space (this is the default for most objects). | ++-----------+------------------------------------------------------------+ +| "'>'" | Forces the field to be right-aligned within the available | +| | space (this is the default for numbers). | ++-----------+------------------------------------------------------------+ +| "'='" | Forces the padding to be placed after the sign (if any) | +| | but before the digits. This is used for printing fields | +| | in the form ‘+000000120’. This alignment option is only | +| | valid for numeric types, excluding "complex". It becomes | +| | the default for numbers when ‘0’ immediately precedes the | +| | field width. | ++-----------+------------------------------------------------------------+ +| "'^'" | Forces the field to be centered within the available | +| | space. | ++-----------+------------------------------------------------------------+ + +Note that unless a minimum field width is defined, the field width +will always be the same size as the data to fill it, so that the +alignment option has no meaning in this case. + +The *sign* option is only valid for number types, and can be one of +the following: + ++-----------+------------------------------------------------------------+ +| Option | Meaning | +|===========|============================================================| +| "'+'" | indicates that a sign should be used for both positive as | +| | well as negative numbers. 
| ++-----------+------------------------------------------------------------+ +| "'-'" | indicates that a sign should be used only for negative | +| | numbers (this is the default behavior). | ++-----------+------------------------------------------------------------+ +| space | indicates that a leading space should be used on positive | +| | numbers, and a minus sign on negative numbers. | ++-----------+------------------------------------------------------------+ + +The "'z'" option coerces negative zero floating-point values to +positive zero after rounding to the format precision. This option is +only valid for floating-point presentation types. + +Changed in version 3.11: Added the "'z'" option (see also **PEP +682**). + +The "'#'" option causes the “alternate form†to be used for the +conversion. The alternate form is defined differently for different +types. This option is only valid for integer, float and complex +types. For integers, when binary, octal, or hexadecimal output is +used, this option adds the respective prefix "'0b'", "'0o'", "'0x'", +or "'0X'" to the output value. For float and complex the alternate +form causes the result of the conversion to always contain a decimal- +point character, even if no digits follow it. Normally, a decimal- +point character appears in the result of these conversions only if a +digit follows it. In addition, for "'g'" and "'G'" conversions, +trailing zeros are not removed from the result. + +The "','" option signals the use of a comma for a thousands separator +for floating-point presentation types and for integer presentation +type "'d'". For other presentation types, this option is an error. For +a locale aware separator, use the "'n'" integer presentation type +instead. + +Changed in version 3.1: Added the "','" option (see also **PEP 378**). + +The "'_'" option signals the use of an underscore for a thousands +separator for floating-point presentation types and for integer +presentation type "'d'". For integer presentation types "'b'", "'o'", +"'x'", and "'X'", underscores will be inserted every 4 digits. For +other presentation types, specifying this option is an error. + +Changed in version 3.6: Added the "'_'" option (see also **PEP 515**). + +*width* is a decimal integer defining the minimum total field width, +including any prefixes, separators, and other formatting characters. +If not specified, then the field width will be determined by the +content. + +When no explicit alignment is given, preceding the *width* field by a +zero ("'0'") character enables sign-aware zero-padding for numeric +types, excluding "complex". This is equivalent to a *fill* character +of "'0'" with an *alignment* type of "'='". + +Changed in version 3.10: Preceding the *width* field by "'0'" no +longer affects the default alignment for strings. + +The *precision* is a decimal integer indicating how many digits should +be displayed after the decimal point for presentation types "'f'" and +"'F'", or before and after the decimal point for presentation types +"'g'" or "'G'". For string presentation types the field indicates the +maximum field size - in other words, how many characters will be used +from the field content. The *precision* is not allowed for integer +presentation types. + +Finally, the *type* determines how the data should be presented. 
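+
+The following snippet (illustrative, not exhaustive) shows several of
+the options described above, using the built-in "format()" function:
+
+   >>> format(1234567.891, ',.2f')   # thousands separator and precision
+   '1,234,567.89'
+   >>> format(42, '+09.2f')          # sign, sign-aware zero-padding, width
+   '+00042.00'
+   >>> format(-0.001, 'z.2f')        # negative zero coerced to positive zero
+   '0.00'
+   >>> format(255, '#x')             # alternate form for hexadecimal output
+   '0xff'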
+ +The available string presentation types are: + + +-----------+------------------------------------------------------------+ + | Type | Meaning | + |===========|============================================================| + | "'s'" | String format. This is the default type for strings and | + | | may be omitted. | + +-----------+------------------------------------------------------------+ + | None | The same as "'s'". | + +-----------+------------------------------------------------------------+ + +The available integer presentation types are: + + +-----------+------------------------------------------------------------+ + | Type | Meaning | + |===========|============================================================| + | "'b'" | Binary format. Outputs the number in base 2. | + +-----------+------------------------------------------------------------+ + | "'c'" | Character. Converts the integer to the corresponding | + | | unicode character before printing. | + +-----------+------------------------------------------------------------+ + | "'d'" | Decimal Integer. Outputs the number in base 10. | + +-----------+------------------------------------------------------------+ + | "'o'" | Octal format. Outputs the number in base 8. | + +-----------+------------------------------------------------------------+ + | "'x'" | Hex format. Outputs the number in base 16, using lower- | + | | case letters for the digits above 9. | + +-----------+------------------------------------------------------------+ + | "'X'" | Hex format. Outputs the number in base 16, using upper- | + | | case letters for the digits above 9. In case "'#'" is | + | | specified, the prefix "'0x'" will be upper-cased to "'0X'" | + | | as well. | + +-----------+------------------------------------------------------------+ + | "'n'" | Number. This is the same as "'d'", except that it uses the | + | | current locale setting to insert the appropriate number | + | | separator characters. | + +-----------+------------------------------------------------------------+ + | None | The same as "'d'". | + +-----------+------------------------------------------------------------+ + +In addition to the above presentation types, integers can be formatted +with the floating-point presentation types listed below (except "'n'" +and "None"). When doing so, "float()" is used to convert the integer +to a floating-point number before formatting. + +The available presentation types for "float" and "Decimal" values are: + + +-----------+------------------------------------------------------------+ + | Type | Meaning | + |===========|============================================================| + | "'e'" | Scientific notation. For a given precision "p", formats | + | | the number in scientific notation with the letter ‘e’ | + | | separating the coefficient from the exponent. The | + | | coefficient has one digit before and "p" digits after the | + | | decimal point, for a total of "p + 1" significant digits. | + | | With no precision given, uses a precision of "6" digits | + | | after the decimal point for "float", and shows all | + | | coefficient digits for "Decimal". If "p=0", the decimal | + | | point is omitted unless the "#" option is used. | + +-----------+------------------------------------------------------------+ + | "'E'" | Scientific notation. Same as "'e'" except it uses an upper | + | | case ‘E’ as the separator character. | + +-----------+------------------------------------------------------------+ + | "'f'" | Fixed-point notation. 
For a given precision "p", formats | + | | the number as a decimal number with exactly "p" digits | + | | following the decimal point. With no precision given, uses | + | | a precision of "6" digits after the decimal point for | + | | "float", and uses a precision large enough to show all | + | | coefficient digits for "Decimal". If "p=0", the decimal | + | | point is omitted unless the "#" option is used. | + +-----------+------------------------------------------------------------+ + | "'F'" | Fixed-point notation. Same as "'f'", but converts "nan" to | + | | "NAN" and "inf" to "INF". | + +-----------+------------------------------------------------------------+ + | "'g'" | General format. For a given precision "p >= 1", this | + | | rounds the number to "p" significant digits and then | + | | formats the result in either fixed-point format or in | + | | scientific notation, depending on its magnitude. A | + | | precision of "0" is treated as equivalent to a precision | + | | of "1". The precise rules are as follows: suppose that | + | | the result formatted with presentation type "'e'" and | + | | precision "p-1" would have exponent "exp". Then, if "m <= | + | | exp < p", where "m" is -4 for floats and -6 for | + | | "Decimals", the number is formatted with presentation type | + | | "'f'" and precision "p-1-exp". Otherwise, the number is | + | | formatted with presentation type "'e'" and precision | + | | "p-1". In both cases insignificant trailing zeros are | + | | removed from the significand, and the decimal point is | + | | also removed if there are no remaining digits following | + | | it, unless the "'#'" option is used. With no precision | + | | given, uses a precision of "6" significant digits for | + | | "float". For "Decimal", the coefficient of the result is | + | | formed from the coefficient digits of the value; | + | | scientific notation is used for values smaller than "1e-6" | + | | in absolute value and values where the place value of the | + | | least significant digit is larger than 1, and fixed-point | + | | notation is used otherwise. Positive and negative | + | | infinity, positive and negative zero, and nans, are | + | | formatted as "inf", "-inf", "0", "-0" and "nan" | + | | respectively, regardless of the precision. | + +-----------+------------------------------------------------------------+ + | "'G'" | General format. Same as "'g'" except switches to "'E'" if | + | | the number gets too large. The representations of infinity | + | | and NaN are uppercased, too. | + +-----------+------------------------------------------------------------+ + | "'n'" | Number. This is the same as "'g'", except that it uses the | + | | current locale setting to insert the appropriate number | + | | separator characters. | + +-----------+------------------------------------------------------------+ + | "'%'" | Percentage. Multiplies the number by 100 and displays in | + | | fixed ("'f'") format, followed by a percent sign. | + +-----------+------------------------------------------------------------+ + | None | For "float" this is like the "'g'" type, except that when | + | | fixed- point notation is used to format the result, it | + | | always includes at least one digit past the decimal point, | + | | and switches to the scientific notation when "exp >= p - | + | | 1". When the precision is not specified, the latter will | + | | be as large as needed to represent the given value | + | | faithfully. 
For "Decimal", this is the same as either | + | | "'g'" or "'G'" depending on the value of | + | | "context.capitals" for the current decimal context. The | + | | overall effect is to match the output of "str()" as | + | | altered by the other format modifiers. | + +-----------+------------------------------------------------------------+ + +The result should be correctly rounded to a given precision "p" of +digits after the decimal point. The rounding mode for "float" matches +that of the "round()" builtin. For "Decimal", the rounding mode of +the current context will be used. + +The available presentation types for "complex" are the same as those +for "float" ("'%'" is not allowed). Both the real and imaginary +components of a complex number are formatted as floating-point +numbers, according to the specified presentation type. They are +separated by the mandatory sign of the imaginary part, the latter +being terminated by a "j" suffix. If the presentation type is +missing, the result will match the output of "str()" (complex numbers +with a non-zero real part are also surrounded by parentheses), +possibly altered by other format modifiers. + + +Format examples +=============== + +This section contains examples of the "str.format()" syntax and +comparison with the old "%"-formatting. + +In most of the cases the syntax is similar to the old "%"-formatting, +with the addition of the "{}" and with ":" used instead of "%". For +example, "'%03.2f'" can be translated to "'{:03.2f}'". + +The new format syntax also supports new and different options, shown +in the following examples. + +Accessing arguments by position: + + >>> '{0}, {1}, {2}'.format('a', 'b', 'c') + 'a, b, c' + >>> '{}, {}, {}'.format('a', 'b', 'c') # 3.1+ only + 'a, b, c' + >>> '{2}, {1}, {0}'.format('a', 'b', 'c') + 'c, b, a' + >>> '{2}, {1}, {0}'.format(*'abc') # unpacking argument sequence + 'c, b, a' + >>> '{0}{1}{0}'.format('abra', 'cad') # arguments' indices can be repeated + 'abracadabra' + +Accessing arguments by name: + + >>> 'Coordinates: {latitude}, {longitude}'.format(latitude='37.24N', longitude='-115.81W') + 'Coordinates: 37.24N, -115.81W' + >>> coord = {'latitude': '37.24N', 'longitude': '-115.81W'} + >>> 'Coordinates: {latitude}, {longitude}'.format(**coord) + 'Coordinates: 37.24N, -115.81W' + +Accessing arguments’ attributes: + + >>> c = 3-5j + >>> ('The complex number {0} is formed from the real part {0.real} ' + ... 'and the imaginary part {0.imag}.').format(c) + 'The complex number (3-5j) is formed from the real part 3.0 and the imaginary part -5.0.' + >>> class Point: + ... def __init__(self, x, y): + ... self.x, self.y = x, y + ... def __str__(self): + ... return 'Point({self.x}, {self.y})'.format(self=self) + ... 
+ >>> str(Point(4, 2)) + 'Point(4, 2)' + +Accessing arguments’ items: + + >>> coord = (3, 5) + >>> 'X: {0[0]}; Y: {0[1]}'.format(coord) + 'X: 3; Y: 5' + +Replacing "%s" and "%r": + + >>> "repr() shows quotes: {!r}; str() doesn't: {!s}".format('test1', 'test2') + "repr() shows quotes: 'test1'; str() doesn't: test2" + +Aligning the text and specifying a width: + + >>> '{:<30}'.format('left aligned') + 'left aligned ' + >>> '{:>30}'.format('right aligned') + ' right aligned' + >>> '{:^30}'.format('centered') + ' centered ' + >>> '{:*^30}'.format('centered') # use '*' as a fill char + '***********centered***********' + +Replacing "%+f", "%-f", and "% f" and specifying a sign: + + >>> '{:+f}; {:+f}'.format(3.14, -3.14) # show it always + '+3.140000; -3.140000' + >>> '{: f}; {: f}'.format(3.14, -3.14) # show a space for positive numbers + ' 3.140000; -3.140000' + >>> '{:-f}; {:-f}'.format(3.14, -3.14) # show only the minus -- same as '{:f}; {:f}' + '3.140000; -3.140000' + +Replacing "%x" and "%o" and converting the value to different bases: + + >>> # format also supports binary numbers + >>> "int: {0:d}; hex: {0:x}; oct: {0:o}; bin: {0:b}".format(42) + 'int: 42; hex: 2a; oct: 52; bin: 101010' + >>> # with 0x, 0o, or 0b as prefix: + >>> "int: {0:d}; hex: {0:#x}; oct: {0:#o}; bin: {0:#b}".format(42) + 'int: 42; hex: 0x2a; oct: 0o52; bin: 0b101010' + +Using the comma as a thousands separator: + + >>> '{:,}'.format(1234567890) + '1,234,567,890' + +Expressing a percentage: + + >>> points = 19 + >>> total = 22 + >>> 'Correct answers: {:.2%}'.format(points/total) + 'Correct answers: 86.36%' + +Using type-specific formatting: + + >>> import datetime + >>> d = datetime.datetime(2010, 7, 4, 12, 15, 58) + >>> '{:%Y-%m-%d %H:%M:%S}'.format(d) + '2010-07-04 12:15:58' + +Nesting arguments and more complex examples: + + >>> for align, text in zip('<^>', ['left', 'center', 'right']): + ... '{0:{fill}{align}16}'.format(text, fill=align, align=align) + ... + 'left<<<<<<<<<<<<' + '^^^^^center^^^^^' + '>>>>>>>>>>>right' + >>> + >>> octets = [192, 168, 0, 1] + >>> '{:02X}{:02X}{:02X}{:02X}'.format(*octets) + 'C0A80001' + >>> int(_, 16) + 3232235521 + >>> + >>> width = 5 + >>> for num in range(5,12): + ... for base in 'dXob': + ... print('{0:{width}{base}}'.format(num, base=base, width=width), end=' ') + ... print() + ... + 5 5 5 101 + 6 6 6 110 + 7 7 7 111 + 8 8 10 1000 + 9 9 11 1001 + 10 A 12 1010 + 11 B 13 1011 +''', + 'function': r'''Function definitions +******************** + +A function definition defines a user-defined function object (see +section The standard type hierarchy): + + funcdef ::= [decorators] "def" funcname [type_params] "(" [parameter_list] ")" + ["->" expression] ":" suite + decorators ::= decorator+ + decorator ::= "@" assignment_expression NEWLINE + parameter_list ::= defparameter ("," defparameter)* "," "/" ["," [parameter_list_no_posonly]] + | parameter_list_no_posonly + parameter_list_no_posonly ::= defparameter ("," defparameter)* ["," [parameter_list_starargs]] + | parameter_list_starargs + parameter_list_starargs ::= "*" [star_parameter] ("," defparameter)* ["," ["**" parameter [","]]] + | "**" parameter [","] + parameter ::= identifier [":" expression] + star_parameter ::= identifier [":" ["*"] expression] + defparameter ::= parameter ["=" expression] + funcname ::= identifier + +A function definition is an executable statement. Its execution binds +the function name in the current local namespace to a function object +(a wrapper around the executable code for the function). 
This +function object contains a reference to the current global namespace +as the global namespace to be used when the function is called. + +The function definition does not execute the function body; this gets +executed only when the function is called. [4] + +A function definition may be wrapped by one or more *decorator* +expressions. Decorator expressions are evaluated when the function is +defined, in the scope that contains the function definition. The +result must be a callable, which is invoked with the function object +as the only argument. The returned value is bound to the function name +instead of the function object. Multiple decorators are applied in +nested fashion. For example, the following code + + @f1(arg) + @f2 + def func(): pass + +is roughly equivalent to + + def func(): pass + func = f1(arg)(f2(func)) + +except that the original function is not temporarily bound to the name +"func". + +Changed in version 3.9: Functions may be decorated with any valid +"assignment_expression". Previously, the grammar was much more +restrictive; see **PEP 614** for details. + +A list of type parameters may be given in square brackets between the +function’s name and the opening parenthesis for its parameter list. +This indicates to static type checkers that the function is generic. +At runtime, the type parameters can be retrieved from the function’s +"__type_params__" attribute. See Generic functions for more. + +Changed in version 3.12: Type parameter lists are new in Python 3.12. + +When one or more *parameters* have the form *parameter* "=" +*expression*, the function is said to have “default parameter values.†+For a parameter with a default value, the corresponding *argument* may +be omitted from a call, in which case the parameter’s default value is +substituted. If a parameter has a default value, all following +parameters up until the “"*"†must also have a default value — this is +a syntactic restriction that is not expressed by the grammar. + +**Default parameter values are evaluated from left to right when the +function definition is executed.** This means that the expression is +evaluated once, when the function is defined, and that the same “pre- +computed†value is used for each call. This is especially important +to understand when a default parameter value is a mutable object, such +as a list or a dictionary: if the function modifies the object (e.g. +by appending an item to a list), the default parameter value is in +effect modified. This is generally not what was intended. A way +around this is to use "None" as the default, and explicitly test for +it in the body of the function, e.g.: + + def whats_on_the_telly(penguin=None): + if penguin is None: + penguin = [] + penguin.append("property of the zoo") + return penguin + +Function call semantics are described in more detail in section Calls. +A function call always assigns values to all parameters mentioned in +the parameter list, either from positional arguments, from keyword +arguments, or from default values. If the form “"*identifier"†is +present, it is initialized to a tuple receiving any excess positional +parameters, defaulting to the empty tuple. If the form +“"**identifier"†is present, it is initialized to a new ordered +mapping receiving any excess keyword arguments, defaulting to a new +empty mapping of the same type. Parameters after “"*"†or +“"*identifier"†are keyword-only parameters and may only be passed by +keyword arguments. 
Parameters before “"/"†are positional-only +parameters and may only be passed by positional arguments. + +Changed in version 3.8: The "/" function parameter syntax may be used +to indicate positional-only parameters. See **PEP 570** for details. + +Parameters may have an *annotation* of the form “": expression"†+following the parameter name. Any parameter may have an annotation, +even those of the form "*identifier" or "**identifier". (As a special +case, parameters of the form "*identifier" may have an annotation “": +*expression"â€.) Functions may have “return†annotation of the form +“"-> expression"†after the parameter list. These annotations can be +any valid Python expression. The presence of annotations does not +change the semantics of a function. See Annotations for more +information on annotations. + +Changed in version 3.11: Parameters of the form “"*identifier"†may +have an annotation “": *expression"â€. See **PEP 646**. + +It is also possible to create anonymous functions (functions not bound +to a name), for immediate use in expressions. This uses lambda +expressions, described in section Lambdas. Note that the lambda +expression is merely a shorthand for a simplified function definition; +a function defined in a “"def"†statement can be passed around or +assigned to another name just like a function defined by a lambda +expression. The “"def"†form is actually more powerful since it +allows the execution of multiple statements and annotations. + +**Programmer’s note:** Functions are first-class objects. A “"def"†+statement executed inside a function definition defines a local +function that can be returned or passed around. Free variables used +in the nested function can access the local variables of the function +containing the def. See section Naming and binding for details. + +See also: + + **PEP 3107** - Function Annotations + The original specification for function annotations. + + **PEP 484** - Type Hints + Definition of a standard meaning for annotations: type hints. + + **PEP 526** - Syntax for Variable Annotations + Ability to type hint variable declarations, including class + variables and instance variables. + + **PEP 563** - Postponed Evaluation of Annotations + Support for forward references within annotations by preserving + annotations in a string form at runtime instead of eager + evaluation. + + **PEP 318** - Decorators for Functions and Methods + Function and method decorators were introduced. Class decorators + were introduced in **PEP 3129**. +''', + 'global': r'''The "global" statement +********************** + + global_stmt ::= "global" identifier ("," identifier)* + +The "global" statement causes the listed identifiers to be interpreted +as globals. It would be impossible to assign to a global variable +without "global", although free variables may refer to globals without +being declared global. + +The "global" statement applies to the entire scope of a function or +class body. A "SyntaxError" is raised if a variable is used or +assigned to prior to its global declaration in the scope. + +**Programmer’s note:** "global" is a directive to the parser. It +applies only to code parsed at the same time as the "global" +statement. In particular, a "global" statement contained in a string +or code object supplied to the built-in "exec()" function does not +affect the code block *containing* the function call, and code +contained in such a string is unaffected by "global" statements in the +code containing the function call. 
The same applies to the "eval()" +and "compile()" functions. +''', + 'id-classes': r'''Reserved classes of identifiers +******************************* + +Certain classes of identifiers (besides keywords) have special +meanings. These classes are identified by the patterns of leading and +trailing underscore characters: + +"_*" + Not imported by "from module import *". + +"_" + In a "case" pattern within a "match" statement, "_" is a soft + keyword that denotes a wildcard. + + Separately, the interactive interpreter makes the result of the + last evaluation available in the variable "_". (It is stored in the + "builtins" module, alongside built-in functions like "print".) + + Elsewhere, "_" is a regular identifier. It is often used to name + “special†items, but it is not special to Python itself. + + Note: + + The name "_" is often used in conjunction with + internationalization; refer to the documentation for the + "gettext" module for more information on this convention.It is + also commonly used for unused variables. + +"__*__" + System-defined names, informally known as “dunder†names. These + names are defined by the interpreter and its implementation + (including the standard library). Current system names are + discussed in the Special method names section and elsewhere. More + will likely be defined in future versions of Python. *Any* use of + "__*__" names, in any context, that does not follow explicitly + documented use, is subject to breakage without warning. + +"__*" + Class-private names. Names in this category, when used within the + context of a class definition, are re-written to use a mangled form + to help avoid name clashes between “private†attributes of base and + derived classes. See section Identifiers (Names). +''', + 'identifiers': r'''Identifiers and keywords +************************ + +Identifiers (also referred to as *names*) are described by the +following lexical definitions. + +The syntax of identifiers in Python is based on the Unicode standard +annex UAX-31, with elaboration and changes as defined below; see also +**PEP 3131** for further details. + +Within the ASCII range (U+0001..U+007F), the valid characters for +identifiers include the uppercase and lowercase letters "A" through +"Z", the underscore "_" and, except for the first character, the +digits "0" through "9". Python 3.0 introduced additional characters +from outside the ASCII range (see **PEP 3131**). For these +characters, the classification uses the version of the Unicode +Character Database as included in the "unicodedata" module. + +Identifiers are unlimited in length. Case is significant. + + identifier ::= xid_start xid_continue* + id_start ::= + id_continue ::= + xid_start ::= + xid_continue ::= + +The Unicode category codes mentioned above stand for: + +* *Lu* - uppercase letters + +* *Ll* - lowercase letters + +* *Lt* - titlecase letters + +* *Lm* - modifier letters + +* *Lo* - other letters + +* *Nl* - letter numbers + +* *Mn* - nonspacing marks + +* *Mc* - spacing combining marks + +* *Nd* - decimal numbers + +* *Pc* - connector punctuations + +* *Other_ID_Start* - explicit list of characters in PropList.txt to + support backwards compatibility + +* *Other_ID_Continue* - likewise + +All identifiers are converted into the normal form NFKC while parsing; +comparison of identifiers is based on NFKC. 
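+
+As an illustrative example (assuming a UTF-8 source file), the long s
+character "ſ" (U+017F) normalizes to "s" under NFKC, so both
+spellings below refer to the same name:
+
+   ſpam = 42    # identifier is stored as "spam" after normalization
+   print(spam)  # prints 42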
+ +A non-normative HTML file listing all valid identifier characters for +Unicode 16.0.0 can be found at +https://www.unicode.org/Public/16.0.0/ucd/DerivedCoreProperties.txt + + +Keywords +======== + +The following identifiers are used as reserved words, or *keywords* of +the language, and cannot be used as ordinary identifiers. They must +be spelled exactly as written here: + + False await else import pass + None break except in raise + True class finally is return + and continue for lambda try + as def from nonlocal while + assert del global not with + async elif if or yield + + +Soft Keywords +============= + +Added in version 3.10. + +Some identifiers are only reserved under specific contexts. These are +known as *soft keywords*. The identifiers "match", "case", "type" and +"_" can syntactically act as keywords in certain contexts, but this +distinction is done at the parser level, not when tokenizing. + +As soft keywords, their use in the grammar is possible while still +preserving compatibility with existing code that uses these names as +identifier names. + +"match", "case", and "_" are used in the "match" statement. "type" is +used in the "type" statement. + +Changed in version 3.12: "type" is now a soft keyword. + + +Reserved classes of identifiers +=============================== + +Certain classes of identifiers (besides keywords) have special +meanings. These classes are identified by the patterns of leading and +trailing underscore characters: + +"_*" + Not imported by "from module import *". + +"_" + In a "case" pattern within a "match" statement, "_" is a soft + keyword that denotes a wildcard. + + Separately, the interactive interpreter makes the result of the + last evaluation available in the variable "_". (It is stored in the + "builtins" module, alongside built-in functions like "print".) + + Elsewhere, "_" is a regular identifier. It is often used to name + “special†items, but it is not special to Python itself. + + Note: + + The name "_" is often used in conjunction with + internationalization; refer to the documentation for the + "gettext" module for more information on this convention.It is + also commonly used for unused variables. + +"__*__" + System-defined names, informally known as “dunder†names. These + names are defined by the interpreter and its implementation + (including the standard library). Current system names are + discussed in the Special method names section and elsewhere. More + will likely be defined in future versions of Python. *Any* use of + "__*__" names, in any context, that does not follow explicitly + documented use, is subject to breakage without warning. + +"__*" + Class-private names. Names in this category, when used within the + context of a class definition, are re-written to use a mangled form + to help avoid name clashes between “private†attributes of base and + derived classes. See section Identifiers (Names). +''', + 'if': r'''The "if" statement +****************** + +The "if" statement is used for conditional execution: + + if_stmt ::= "if" assignment_expression ":" suite + ("elif" assignment_expression ":" suite)* + ["else" ":" suite] + +It selects exactly one of the suites by evaluating the expressions one +by one until one is found to be true (see section Boolean operations +for the definition of true and false); then that suite is executed +(and no other part of the "if" statement is executed or evaluated). +If all expressions are false, the suite of the "else" clause, if +present, is executed. 
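+
+A short illustrative example:
+
+   x = -3
+   if x < 0:
+       sign = -1
+   elif x == 0:
+       sign = 0
+   else:
+       sign = 1
+   print(sign)  # prints -1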
+''', + 'imaginary': r'''Imaginary literals +****************** + +Imaginary literals are described by the following lexical definitions: + + imagnumber ::= (floatnumber | digitpart) ("j" | "J") + +An imaginary literal yields a complex number with a real part of 0.0. +Complex numbers are represented as a pair of floating-point numbers +and have the same restrictions on their range. To create a complex +number with a nonzero real part, add a floating-point number to it, +e.g., "(3+4j)". Some examples of imaginary literals: + + 3.14j 10.j 10j .001j 1e100j 3.14e-10j 3.14_15_93j +''', + 'import': r'''The "import" statement +********************** + + import_stmt ::= "import" module ["as" identifier] ("," module ["as" identifier])* + | "from" relative_module "import" identifier ["as" identifier] + ("," identifier ["as" identifier])* + | "from" relative_module "import" "(" identifier ["as" identifier] + ("," identifier ["as" identifier])* [","] ")" + | "from" relative_module "import" "*" + module ::= (identifier ".")* identifier + relative_module ::= "."* module | "."+ + +The basic import statement (no "from" clause) is executed in two +steps: + +1. find a module, loading and initializing it if necessary + +2. define a name or names in the local namespace for the scope where + the "import" statement occurs. + +When the statement contains multiple clauses (separated by commas) the +two steps are carried out separately for each clause, just as though +the clauses had been separated out into individual import statements. + +The details of the first step, finding and loading modules, are +described in greater detail in the section on the import system, which +also describes the various types of packages and modules that can be +imported, as well as all the hooks that can be used to customize the +import system. Note that failures in this step may indicate either +that the module could not be located, *or* that an error occurred +while initializing the module, which includes execution of the +module’s code. + +If the requested module is retrieved successfully, it will be made +available in the local namespace in one of three ways: + +* If the module name is followed by "as", then the name following "as" + is bound directly to the imported module. + +* If no other name is specified, and the module being imported is a + top level module, the module’s name is bound in the local namespace + as a reference to the imported module + +* If the module being imported is *not* a top level module, then the + name of the top level package that contains the module is bound in + the local namespace as a reference to the top level package. The + imported module must be accessed using its full qualified name + rather than directly + +The "from" form uses a slightly more complex process: + +1. find the module specified in the "from" clause, loading and + initializing it if necessary; + +2. for each of the identifiers specified in the "import" clauses: + + 1. check if the imported module has an attribute by that name + + 2. if not, attempt to import a submodule with that name and then + check the imported module again for that attribute + + 3. if the attribute is not found, "ImportError" is raised. + + 4. 
otherwise, a reference to that value is stored in the local + namespace, using the name in the "as" clause if it is present, + otherwise using the attribute name + +Examples: + + import foo # foo imported and bound locally + import foo.bar.baz # foo, foo.bar, and foo.bar.baz imported, foo bound locally + import foo.bar.baz as fbb # foo, foo.bar, and foo.bar.baz imported, foo.bar.baz bound as fbb + from foo.bar import baz # foo, foo.bar, and foo.bar.baz imported, foo.bar.baz bound as baz + from foo import attr # foo imported and foo.attr bound as attr + +If the list of identifiers is replaced by a star ("'*'"), all public +names defined in the module are bound in the local namespace for the +scope where the "import" statement occurs. + +The *public names* defined by a module are determined by checking the +module’s namespace for a variable named "__all__"; if defined, it must +be a sequence of strings which are names defined or imported by that +module. The names given in "__all__" are all considered public and +are required to exist. If "__all__" is not defined, the set of public +names includes all names found in the module’s namespace which do not +begin with an underscore character ("'_'"). "__all__" should contain +the entire public API. It is intended to avoid accidentally exporting +items that are not part of the API (such as library modules which were +imported and used within the module). + +The wild card form of import — "from module import *" — is only +allowed at the module level. Attempting to use it in class or +function definitions will raise a "SyntaxError". + +When specifying what module to import you do not have to specify the +absolute name of the module. When a module or package is contained +within another package it is possible to make a relative import within +the same top package without having to mention the package name. By +using leading dots in the specified module or package after "from" you +can specify how high to traverse up the current package hierarchy +without specifying exact names. One leading dot means the current +package where the module making the import exists. Two dots means up +one package level. Three dots is up two levels, etc. So if you execute +"from . import mod" from a module in the "pkg" package then you will +end up importing "pkg.mod". If you execute "from ..subpkg2 import mod" +from within "pkg.subpkg1" you will import "pkg.subpkg2.mod". The +specification for relative imports is contained in the Package +Relative Imports section. + +"importlib.import_module()" is provided to support applications that +determine dynamically the modules to be loaded. + +Raises an auditing event "import" with arguments "module", "filename", +"sys.path", "sys.meta_path", "sys.path_hooks". + + +Future statements +================= + +A *future statement* is a directive to the compiler that a particular +module should be compiled using syntax or semantics that will be +available in a specified future release of Python where the feature +becomes standard. + +The future statement is intended to ease migration to future versions +of Python that introduce incompatible changes to the language. It +allows use of the new features on a per-module basis before the +release in which the feature becomes standard. 
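+
+For instance, a module opting in to postponed evaluation of
+annotations (**PEP 563**) might begin with (illustrative only):
+
+   from __future__ import annotations
+
+   def greet(name: UndefinedType) -> None:
+       # the annotation is kept as a string and not evaluated here,
+       # so the forward reference does not raise a NameError
+       print("Hello,", name)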
+ + future_stmt ::= "from" "__future__" "import" feature ["as" identifier] + ("," feature ["as" identifier])* + | "from" "__future__" "import" "(" feature ["as" identifier] + ("," feature ["as" identifier])* [","] ")" + feature ::= identifier + +A future statement must appear near the top of the module. The only +lines that can appear before a future statement are: + +* the module docstring (if any), + +* comments, + +* blank lines, and + +* other future statements. + +The only feature that requires using the future statement is +"annotations" (see **PEP 563**). + +All historical features enabled by the future statement are still +recognized by Python 3. The list includes "absolute_import", +"division", "generators", "generator_stop", "unicode_literals", +"print_function", "nested_scopes" and "with_statement". They are all +redundant because they are always enabled, and only kept for backwards +compatibility. + +A future statement is recognized and treated specially at compile +time: Changes to the semantics of core constructs are often +implemented by generating different code. It may even be the case +that a new feature introduces new incompatible syntax (such as a new +reserved word), in which case the compiler may need to parse the +module differently. Such decisions cannot be pushed off until +runtime. + +For any given release, the compiler knows which feature names have +been defined, and raises a compile-time error if a future statement +contains a feature not known to it. + +The direct runtime semantics are the same as for any import statement: +there is a standard module "__future__", described later, and it will +be imported in the usual way at the time the future statement is +executed. + +The interesting runtime semantics depend on the specific feature +enabled by the future statement. + +Note that there is nothing special about the statement: + + import __future__ [as name] + +That is not a future statement; it’s an ordinary import statement with +no special semantics or syntax restrictions. + +Code compiled by calls to the built-in functions "exec()" and +"compile()" that occur in a module "M" containing a future statement +will, by default, use the new syntax or semantics associated with the +future statement. This can be controlled by optional arguments to +"compile()" — see the documentation of that function for details. + +A future statement typed at an interactive interpreter prompt will +take effect for the rest of the interpreter session. If an +interpreter is started with the "-i" option, is passed a script name +to execute, and the script includes a future statement, it will be in +effect in the interactive session started after the script is +executed. + +See also: + + **PEP 236** - Back to the __future__ + The original proposal for the __future__ mechanism. +''', + 'in': r'''Membership test operations +************************** + +The operators "in" and "not in" test for membership. "x in s" +evaluates to "True" if *x* is a member of *s*, and "False" otherwise. +"x not in s" returns the negation of "x in s". All built-in sequences +and set types support this as well as dictionary, for which "in" tests +whether the dictionary has a given key. For container types such as +list, tuple, set, frozenset, dict, or collections.deque, the +expression "x in y" is equivalent to "any(x is e or x == e for e in +y)". + +For the string and bytes types, "x in y" is "True" if and only if *x* +is a substring of *y*. An equivalent test is "y.find(x) != -1". 
+Empty strings are always considered to be a substring of any other
+string, so """ in "abc"" will return "True".
+
+For user-defined classes which define the "__contains__()" method, "x
+in y" returns "True" if "y.__contains__(x)" returns a true value, and
+"False" otherwise.
+
+For user-defined classes which do not define "__contains__()" but do
+define "__iter__()", "x in y" is "True" if some value "z", for which
+the expression "x is z or x == z" is true, is produced while iterating
+over "y". If an exception is raised during the iteration, it is as if
+"in" raised that exception.
+
+Lastly, the old-style iteration protocol is tried: if a class defines
+"__getitem__()", "x in y" is "True" if and only if there is a non-
+negative integer index *i* such that "x is y[i] or x == y[i]", and no
+lower integer index raises the "IndexError" exception. (If any other
+exception is raised, it is as if "in" raised that exception).
+
+The operator "not in" is defined to have the inverse truth value of
+"in".
+''',
+ 'integers': r'''Integer literals
+****************
+
+Integer literals are described by the following lexical definitions:
+
+   integer      ::= decinteger | bininteger | octinteger | hexinteger
+   decinteger   ::= nonzerodigit (["_"] digit)* | "0"+ (["_"] "0")*
+   bininteger   ::= "0" ("b" | "B") (["_"] bindigit)+
+   octinteger   ::= "0" ("o" | "O") (["_"] octdigit)+
+   hexinteger   ::= "0" ("x" | "X") (["_"] hexdigit)+
+   nonzerodigit ::= "1"..."9"
+   digit        ::= "0"..."9"
+   bindigit     ::= "0" | "1"
+   octdigit     ::= "0"..."7"
+   hexdigit     ::= digit | "a"..."f" | "A"..."F"
+
+There is no limit for the length of integer literals apart from what
+can be stored in available memory.
+
+Underscores are ignored for determining the numeric value of the
+literal. They can be used to group digits for enhanced readability.
+One underscore can occur between digits, and after base specifiers
+like "0x".
+
+Note that leading zeros in a non-zero decimal number are not allowed.
+This is for disambiguation with C-style octal literals, which Python
+used before version 3.0.
+
+Some examples of integer literals:
+
+   7     2147483647                        0o177    0b100110111
+   3     79228162514264337593543950336     0o377    0xdeadbeef
+         100_000_000_000                   0b_1110_0101
+
+Changed in version 3.6: Underscores are now allowed for grouping
+purposes in literals.
+''',
+ 'lambda': r'''Lambdas
+*******
+
+   lambda_expr ::= "lambda" [parameter_list] ":" expression
+
+Lambda expressions (sometimes called lambda forms) are used to create
+anonymous functions. The expression "lambda parameters: expression"
+yields a function object. The unnamed object behaves like a function
+object defined with:
+
+   def <lambda>(parameters):
+       return expression
+
+See section Function definitions for the syntax of parameter lists.
+Note that functions created with lambda expressions cannot contain
+statements or annotations.
+''',
+ 'lists': r'''List displays
+*************
+
+A list display is a possibly empty series of expressions enclosed in
+square brackets:
+
+   list_display ::= "[" [flexible_expression_list | comprehension] "]"
+
+A list display yields a new list object, the contents being specified
+by either a list of expressions or a comprehension. When a comma-
+separated list of expressions is supplied, its elements are evaluated
+from left to right and placed into the list object in that order.
+When a comprehension is supplied, the list is constructed from the
+elements resulting from the comprehension.
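+
+Some short examples of list displays (illustrative only, not part of
+the grammar above):
+
+   [1, 2, 3]                 # a list of three expressions
+   [x*x for x in range(5)]   # a comprehension: [0, 1, 4, 9, 16]
+   []                        # an empty list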
+''', + 'naming': r'''Naming and binding +****************** + + +Binding of names +================ + +*Names* refer to objects. Names are introduced by name binding +operations. + +The following constructs bind names: + +* formal parameters to functions, + +* class definitions, + +* function definitions, + +* assignment expressions, + +* targets that are identifiers if occurring in an assignment: + + * "for" loop header, + + * after "as" in a "with" statement, "except" clause, "except*" + clause, or in the as-pattern in structural pattern matching, + + * in a capture pattern in structural pattern matching + +* "import" statements. + +* "type" statements. + +* type parameter lists. + +The "import" statement of the form "from ... import *" binds all names +defined in the imported module, except those beginning with an +underscore. This form may only be used at the module level. + +A target occurring in a "del" statement is also considered bound for +this purpose (though the actual semantics are to unbind the name). + +Each assignment or import statement occurs within a block defined by a +class or function definition or at the module level (the top-level +code block). + +If a name is bound in a block, it is a local variable of that block, +unless declared as "nonlocal" or "global". If a name is bound at the +module level, it is a global variable. (The variables of the module +code block are local and global.) If a variable is used in a code +block but not defined there, it is a *free variable*. + +Each occurrence of a name in the program text refers to the *binding* +of that name established by the following name resolution rules. + + +Resolution of names +=================== + +A *scope* defines the visibility of a name within a block. If a local +variable is defined in a block, its scope includes that block. If the +definition occurs in a function block, the scope extends to any blocks +contained within the defining one, unless a contained block introduces +a different binding for the name. + +When a name is used in a code block, it is resolved using the nearest +enclosing scope. The set of all such scopes visible to a code block +is called the block’s *environment*. + +When a name is not found at all, a "NameError" exception is raised. If +the current scope is a function scope, and the name refers to a local +variable that has not yet been bound to a value at the point where the +name is used, an "UnboundLocalError" exception is raised. +"UnboundLocalError" is a subclass of "NameError". + +If a name binding operation occurs anywhere within a code block, all +uses of the name within the block are treated as references to the +current block. This can lead to errors when a name is used within a +block before it is bound. This rule is subtle. Python lacks +declarations and allows name binding operations to occur anywhere +within a code block. The local variables of a code block can be +determined by scanning the entire text of the block for name binding +operations. See the FAQ entry on UnboundLocalError for examples. + +If the "global" statement occurs within a block, all uses of the names +specified in the statement refer to the bindings of those names in the +top-level namespace. Names are resolved in the top-level namespace by +searching the global namespace, i.e. the namespace of the module +containing the code block, and the builtins namespace, the namespace +of the module "builtins". The global namespace is searched first. 
If +the names are not found there, the builtins namespace is searched +next. If the names are also not found in the builtins namespace, new +variables are created in the global namespace. The global statement +must precede all uses of the listed names. + +The "global" statement has the same scope as a name binding operation +in the same block. If the nearest enclosing scope for a free variable +contains a global statement, the free variable is treated as a global. + +The "nonlocal" statement causes corresponding names to refer to +previously bound variables in the nearest enclosing function scope. +"SyntaxError" is raised at compile time if the given name does not +exist in any enclosing function scope. Type parameters cannot be +rebound with the "nonlocal" statement. + +The namespace for a module is automatically created the first time a +module is imported. The main module for a script is always called +"__main__". + +Class definition blocks and arguments to "exec()" and "eval()" are +special in the context of name resolution. A class definition is an +executable statement that may use and define names. These references +follow the normal rules for name resolution with an exception that +unbound local variables are looked up in the global namespace. The +namespace of the class definition becomes the attribute dictionary of +the class. The scope of names defined in a class block is limited to +the class block; it does not extend to the code blocks of methods. +This includes comprehensions and generator expressions, but it does +not include annotation scopes, which have access to their enclosing +class scopes. This means that the following will fail: + + class A: + a = 42 + b = list(a + i for i in range(10)) + +However, the following will succeed: + + class A: + type Alias = Nested + class Nested: pass + + print(A.Alias.__value__) # + + +Annotation scopes +================= + +*Annotations*, type parameter lists and "type" statements introduce +*annotation scopes*, which behave mostly like function scopes, but +with some exceptions discussed below. + +Annotation scopes are used in the following contexts: + +* *Function annotations*. + +* *Variable annotations*. + +* Type parameter lists for generic type aliases. + +* Type parameter lists for generic functions. A generic function’s + annotations are executed within the annotation scope, but its + defaults and decorators are not. + +* Type parameter lists for generic classes. A generic class’s base + classes and keyword arguments are executed within the annotation + scope, but its decorators are not. + +* The bounds, constraints, and default values for type parameters + (lazily evaluated). + +* The value of type aliases (lazily evaluated). + +Annotation scopes differ from function scopes in the following ways: + +* Annotation scopes have access to their enclosing class namespace. If + an annotation scope is immediately within a class scope, or within + another annotation scope that is immediately within a class scope, + the code in the annotation scope can use names defined in the class + scope as if it were executed directly within the class body. This + contrasts with regular functions defined within classes, which + cannot access names defined in the class scope. + +* Expressions in annotation scopes cannot contain "yield", "yield + from", "await", or ":=" expressions. (These expressions are allowed + in other scopes contained within the annotation scope.) 
+ +* Names defined in annotation scopes cannot be rebound with "nonlocal" + statements in inner scopes. This includes only type parameters, as + no other syntactic elements that can appear within annotation scopes + can introduce new names. + +* While annotation scopes have an internal name, that name is not + reflected in the *qualified name* of objects defined within the + scope. Instead, the "__qualname__" of such objects is as if the + object were defined in the enclosing scope. + +Added in version 3.12: Annotation scopes were introduced in Python +3.12 as part of **PEP 695**. + +Changed in version 3.13: Annotation scopes are also used for type +parameter defaults, as introduced by **PEP 696**. + +Changed in version 3.14: Annotation scopes are now also used for +annotations, as specified in **PEP 649** and **PEP 749**. + + +Lazy evaluation +=============== + +Most annotation scopes are *lazily evaluated*. This includes +annotations, the values of type aliases created through the "type" +statement, and the bounds, constraints, and default values of type +variables created through the type parameter syntax. This means that +they are not evaluated when the type alias or type variable is +created, or when the object carrying annotations is created. Instead, +they are only evaluated when necessary, for example when the +"__value__" attribute on a type alias is accessed. + +Example: + + >>> type Alias = 1/0 + >>> Alias.__value__ + Traceback (most recent call last): + ... + ZeroDivisionError: division by zero + >>> def func[T: 1/0](): pass + >>> T = func.__type_params__[0] + >>> T.__bound__ + Traceback (most recent call last): + ... + ZeroDivisionError: division by zero + +Here the exception is raised only when the "__value__" attribute of +the type alias or the "__bound__" attribute of the type variable is +accessed. + +This behavior is primarily useful for references to types that have +not yet been defined when the type alias or type variable is created. +For example, lazy evaluation enables creation of mutually recursive +type aliases: + + from typing import Literal + + type SimpleExpr = int | Parenthesized + type Parenthesized = tuple[Literal["("], Expr, Literal[")"]] + type Expr = SimpleExpr | tuple[SimpleExpr, Literal["+", "-"], Expr] + +Lazily evaluated values are evaluated in annotation scope, which means +that names that appear inside the lazily evaluated value are looked up +as if they were used in the immediately enclosing scope. + +Added in version 3.12. + + +Builtins and restricted execution +================================= + +**CPython implementation detail:** Users should not touch +"__builtins__"; it is strictly an implementation detail. Users +wanting to override values in the builtins namespace should "import" +the "builtins" module and modify its attributes appropriately. + +The builtins namespace associated with the execution of a code block +is actually found by looking up the name "__builtins__" in its global +namespace; this should be a dictionary or a module (in the latter case +the module’s dictionary is used). By default, when in the "__main__" +module, "__builtins__" is the built-in module "builtins"; when in any +other module, "__builtins__" is an alias for the dictionary of the +"builtins" module itself. + + +Interaction with dynamic features +================================= + +Name resolution of free variables occurs at runtime, not at compile +time. 
This means that the following code will print 42: + + i = 10 + def f(): + print(i) + i = 42 + f() + +The "eval()" and "exec()" functions do not have access to the full +environment for resolving names. Names may be resolved in the local +and global namespaces of the caller. Free variables are not resolved +in the nearest enclosing namespace, but in the global namespace. [1] +The "exec()" and "eval()" functions have optional arguments to +override the global and local namespace. If only one namespace is +specified, it is used for both. +''', + 'nonlocal': r'''The "nonlocal" statement +************************ + + nonlocal_stmt ::= "nonlocal" identifier ("," identifier)* + +When the definition of a function or class is nested (enclosed) within +the definitions of other functions, its nonlocal scopes are the local +scopes of the enclosing functions. The "nonlocal" statement causes the +listed identifiers to refer to names previously bound in nonlocal +scopes. It allows encapsulated code to rebind such nonlocal +identifiers. If a name is bound in more than one nonlocal scope, the +nearest binding is used. If a name is not bound in any nonlocal scope, +or if there is no nonlocal scope, a "SyntaxError" is raised. + +The "nonlocal" statement applies to the entire scope of a function or +class body. A "SyntaxError" is raised if a variable is used or +assigned to prior to its nonlocal declaration in the scope. + +See also: + + **PEP 3104** - Access to Names in Outer Scopes + The specification for the "nonlocal" statement. + +**Programmer’s note:** "nonlocal" is a directive to the parser and +applies only to code parsed along with it. See the note for the +"global" statement. +''', + 'numbers': r'''Numeric literals +**************** + +There are three types of numeric literals: integers, floating-point +numbers, and imaginary numbers. There are no complex literals +(complex numbers can be formed by adding a real number and an +imaginary number). + +Note that numeric literals do not include a sign; a phrase like "-1" +is actually an expression composed of the unary operator ‘"-"’ and the +literal "1". +''', + 'numeric-types': r'''Emulating numeric types +*********************** + +The following methods can be defined to emulate numeric objects. +Methods corresponding to operations that are not supported by the +particular kind of number implemented (e.g., bitwise operations for +non-integral numbers) should be left undefined. + +object.__add__(self, other) +object.__sub__(self, other) +object.__mul__(self, other) +object.__matmul__(self, other) +object.__truediv__(self, other) +object.__floordiv__(self, other) +object.__mod__(self, other) +object.__divmod__(self, other) +object.__pow__(self, other[, modulo]) +object.__lshift__(self, other) +object.__rshift__(self, other) +object.__and__(self, other) +object.__xor__(self, other) +object.__or__(self, other) + + These methods are called to implement the binary arithmetic + operations ("+", "-", "*", "@", "/", "//", "%", "divmod()", + "pow()", "**", "<<", ">>", "&", "^", "|"). For instance, to + evaluate the expression "x + y", where *x* is an instance of a + class that has an "__add__()" method, "type(x).__add__(x, y)" is + called. The "__divmod__()" method should be the equivalent to + using "__floordiv__()" and "__mod__()"; it should not be related to + "__truediv__()". Note that "__pow__()" should be defined to accept + an optional third argument if the ternary version of the built-in + "pow()" function is to be supported. 
+ + If one of those methods does not support the operation with the + supplied arguments, it should return "NotImplemented". + +object.__radd__(self, other) +object.__rsub__(self, other) +object.__rmul__(self, other) +object.__rmatmul__(self, other) +object.__rtruediv__(self, other) +object.__rfloordiv__(self, other) +object.__rmod__(self, other) +object.__rdivmod__(self, other) +object.__rpow__(self, other[, modulo]) +object.__rlshift__(self, other) +object.__rrshift__(self, other) +object.__rand__(self, other) +object.__rxor__(self, other) +object.__ror__(self, other) + + These methods are called to implement the binary arithmetic + operations ("+", "-", "*", "@", "/", "//", "%", "divmod()", + "pow()", "**", "<<", ">>", "&", "^", "|") with reflected (swapped) + operands. These functions are only called if the operands are of + different types, when the left operand does not support the + corresponding operation [3], or the right operand’s class is + derived from the left operand’s class. [4] For instance, to + evaluate the expression "x - y", where *y* is an instance of a + class that has an "__rsub__()" method, "type(y).__rsub__(y, x)" is + called if "type(x).__sub__(x, y)" returns "NotImplemented" or + "type(y)" is a subclass of "type(x)". [5] + + Note that ternary "pow()" will not try calling "__rpow__()" (the + coercion rules would become too complicated). + + Note: + + If the right operand’s type is a subclass of the left operand’s + type and that subclass provides a different implementation of the + reflected method for the operation, this method will be called + before the left operand’s non-reflected method. This behavior + allows subclasses to override their ancestors’ operations. + +object.__iadd__(self, other) +object.__isub__(self, other) +object.__imul__(self, other) +object.__imatmul__(self, other) +object.__itruediv__(self, other) +object.__ifloordiv__(self, other) +object.__imod__(self, other) +object.__ipow__(self, other[, modulo]) +object.__ilshift__(self, other) +object.__irshift__(self, other) +object.__iand__(self, other) +object.__ixor__(self, other) +object.__ior__(self, other) + + These methods are called to implement the augmented arithmetic + assignments ("+=", "-=", "*=", "@=", "/=", "//=", "%=", "**=", + "<<=", ">>=", "&=", "^=", "|="). These methods should attempt to + do the operation in-place (modifying *self*) and return the result + (which could be, but does not have to be, *self*). If a specific + method is not defined, or if that method returns "NotImplemented", + the augmented assignment falls back to the normal methods. For + instance, if *x* is an instance of a class with an "__iadd__()" + method, "x += y" is equivalent to "x = x.__iadd__(y)" . If + "__iadd__()" does not exist, or if "x.__iadd__(y)" returns + "NotImplemented", "x.__add__(y)" and "y.__radd__(x)" are + considered, as with the evaluation of "x + y". In certain + situations, augmented assignment can result in unexpected errors + (see Why does a_tuple[i] += [‘item’] raise an exception when the + addition works?), but this behavior is in fact part of the data + model. + +object.__neg__(self) +object.__pos__(self) +object.__abs__(self) +object.__invert__(self) + + Called to implement the unary arithmetic operations ("-", "+", + "abs()" and "~"). + +object.__complex__(self) +object.__int__(self) +object.__float__(self) + + Called to implement the built-in functions "complex()", "int()" and + "float()". Should return a value of the appropriate type. 
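+
+As an illustrative sketch (the "Meters" class is invented for this
+example and is not part of the data model), a minimal numeric type
+might support "+" with reflected operands and conversion via "float()"
+like this:
+
+   class Meters:
+       def __init__(self, value):
+           self.value = float(value)
+
+       def __add__(self, other):
+           if isinstance(other, Meters):
+               return Meters(self.value + other.value)
+           if isinstance(other, (int, float)):
+               return Meters(self.value + other)
+           return NotImplemented     # let the other operand handle it
+
+       __radd__ = __add__            # reflected form, e.g. "3 + Meters(2)"
+
+       def __float__(self):
+           return self.value         # supports "float(m)"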
+ +object.__index__(self) + + Called to implement "operator.index()", and whenever Python needs + to losslessly convert the numeric object to an integer object (such + as in slicing, or in the built-in "bin()", "hex()" and "oct()" + functions). Presence of this method indicates that the numeric + object is an integer type. Must return an integer. + + If "__int__()", "__float__()" and "__complex__()" are not defined + then corresponding built-in functions "int()", "float()" and + "complex()" fall back to "__index__()". + +object.__round__(self[, ndigits]) +object.__trunc__(self) +object.__floor__(self) +object.__ceil__(self) + + Called to implement the built-in function "round()" and "math" + functions "trunc()", "floor()" and "ceil()". Unless *ndigits* is + passed to "__round__()" all these methods should return the value + of the object truncated to an "Integral" (typically an "int"). + + Changed in version 3.14: "int()" no longer delegates to the + "__trunc__()" method. +''', + 'objects': r'''Objects, values and types +************************* + +*Objects* are Python’s abstraction for data. All data in a Python +program is represented by objects or by relations between objects. (In +a sense, and in conformance to Von Neumann’s model of a “stored +program computerâ€, code is also represented by objects.) + +Every object has an identity, a type and a value. An object’s +*identity* never changes once it has been created; you may think of it +as the object’s address in memory. The "is" operator compares the +identity of two objects; the "id()" function returns an integer +representing its identity. + +**CPython implementation detail:** For CPython, "id(x)" is the memory +address where "x" is stored. + +An object’s type determines the operations that the object supports +(e.g., “does it have a length?â€) and also defines the possible values +for objects of that type. The "type()" function returns an object’s +type (which is an object itself). Like its identity, an object’s +*type* is also unchangeable. [1] + +The *value* of some objects can change. Objects whose value can +change are said to be *mutable*; objects whose value is unchangeable +once they are created are called *immutable*. (The value of an +immutable container object that contains a reference to a mutable +object can change when the latter’s value is changed; however the +container is still considered immutable, because the collection of +objects it contains cannot be changed. So, immutability is not +strictly the same as having an unchangeable value, it is more subtle.) +An object’s mutability is determined by its type; for instance, +numbers, strings and tuples are immutable, while dictionaries and +lists are mutable. + +Objects are never explicitly destroyed; however, when they become +unreachable they may be garbage-collected. An implementation is +allowed to postpone garbage collection or omit it altogether — it is a +matter of implementation quality how garbage collection is +implemented, as long as no objects are collected that are still +reachable. + +**CPython implementation detail:** CPython currently uses a reference- +counting scheme with (optional) delayed detection of cyclically linked +garbage, which collects most objects as soon as they become +unreachable, but is not guaranteed to collect garbage containing +circular references. See the documentation of the "gc" module for +information on controlling the collection of cyclic garbage. Other +implementations act differently and CPython may change. 
Do not depend +on immediate finalization of objects when they become unreachable (so +you should always close files explicitly). + +Note that the use of the implementation’s tracing or debugging +facilities may keep objects alive that would normally be collectable. +Also note that catching an exception with a "try"…"except" statement +may keep objects alive. + +Some objects contain references to “external†resources such as open +files or windows. It is understood that these resources are freed +when the object is garbage-collected, but since garbage collection is +not guaranteed to happen, such objects also provide an explicit way to +release the external resource, usually a "close()" method. Programs +are strongly recommended to explicitly close such objects. The +"try"…"finally" statement and the "with" statement provide convenient +ways to do this. + +Some objects contain references to other objects; these are called +*containers*. Examples of containers are tuples, lists and +dictionaries. The references are part of a container’s value. In +most cases, when we talk about the value of a container, we imply the +values, not the identities of the contained objects; however, when we +talk about the mutability of a container, only the identities of the +immediately contained objects are implied. So, if an immutable +container (like a tuple) contains a reference to a mutable object, its +value changes if that mutable object is changed. + +Types affect almost all aspects of object behavior. Even the +importance of object identity is affected in some sense: for immutable +types, operations that compute new values may actually return a +reference to any existing object with the same type and value, while +for mutable objects this is not allowed. For example, after "a = 1; b += 1", *a* and *b* may or may not refer to the same object with the +value one, depending on the implementation. This is because "int" is +an immutable type, so the reference to "1" can be reused. This +behaviour depends on the implementation used, so should not be relied +upon, but is something to be aware of when making use of object +identity tests. However, after "c = []; d = []", *c* and *d* are +guaranteed to refer to two different, unique, newly created empty +lists. (Note that "e = f = []" assigns the *same* object to both *e* +and *f*.) +''', + 'operator-summary': r'''Operator precedence +******************* + +The following table summarizes the operator precedence in Python, from +highest precedence (most binding) to lowest precedence (least +binding). Operators in the same box have the same precedence. Unless +the syntax is explicitly given, operators are binary. Operators in +the same box group left to right (except for exponentiation and +conditional expressions, which group from right to left). + +Note that comparisons, membership tests, and identity tests, all have +the same precedence and have a left-to-right chaining feature as +described in the Comparisons section. 
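+
+As a short illustration of chaining (not part of the table below):
+
+   1 < 2 <= 3      # chained: equivalent to (1 < 2) and (2 <= 3)
+   (1 < 2) <= 3    # not chained: compares the result "True" with 3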
+ ++-------------------------------------------------+---------------------------------------+ +| Operator | Description | +|=================================================|=======================================| +| "(expressions...)", "[expressions...]", "{key: | Binding or parenthesized expression, | +| value...}", "{expressions...}" | list display, dictionary display, set | +| | display | ++-------------------------------------------------+---------------------------------------+ +| "x[index]", "x[index:index]", | Subscription, slicing, call, | +| "x(arguments...)", "x.attribute" | attribute reference | ++-------------------------------------------------+---------------------------------------+ +| "await x" | Await expression | ++-------------------------------------------------+---------------------------------------+ +| "**" | Exponentiation [5] | ++-------------------------------------------------+---------------------------------------+ +| "+x", "-x", "~x" | Positive, negative, bitwise NOT | ++-------------------------------------------------+---------------------------------------+ +| "*", "@", "/", "//", "%" | Multiplication, matrix | +| | multiplication, division, floor | +| | division, remainder [6] | ++-------------------------------------------------+---------------------------------------+ +| "+", "-" | Addition and subtraction | ++-------------------------------------------------+---------------------------------------+ +| "<<", ">>" | Shifts | ++-------------------------------------------------+---------------------------------------+ +| "&" | Bitwise AND | ++-------------------------------------------------+---------------------------------------+ +| "^" | Bitwise XOR | ++-------------------------------------------------+---------------------------------------+ +| "|" | Bitwise OR | ++-------------------------------------------------+---------------------------------------+ +| "in", "not in", "is", "is not", "<", "<=", ">", | Comparisons, including membership | +| ">=", "!=", "==" | tests and identity tests | ++-------------------------------------------------+---------------------------------------+ +| "not x" | Boolean NOT | ++-------------------------------------------------+---------------------------------------+ +| "and" | Boolean AND | ++-------------------------------------------------+---------------------------------------+ +| "or" | Boolean OR | ++-------------------------------------------------+---------------------------------------+ +| "if" – "else" | Conditional expression | ++-------------------------------------------------+---------------------------------------+ +| "lambda" | Lambda expression | ++-------------------------------------------------+---------------------------------------+ +| ":=" | Assignment expression | ++-------------------------------------------------+---------------------------------------+ + +-[ Footnotes ]- + +[1] While "abs(x%y) < abs(y)" is true mathematically, for floats it + may not be true numerically due to roundoff. For example, and + assuming a platform on which a Python float is an IEEE 754 double- + precision number, in order that "-1e-100 % 1e100" have the same + sign as "1e100", the computed result is "-1e-100 + 1e100", which + is numerically exactly equal to "1e100". The function + "math.fmod()" returns a result whose sign matches the sign of the + first argument instead, and so returns "-1e-100" in this case. + Which approach is more appropriate depends on the application. 
+
+[2] If x is very close to an exact integer multiple of y, it’s
+    possible for "x//y" to be one larger than "(x-x%y)//y" due to
+    rounding. In such cases, Python returns the latter result, in
+    order to preserve that "divmod(x,y)[0] * y + x % y" be very close
+    to "x".
+
+[3] The Unicode standard distinguishes between *code points* (e.g.
+    U+0041) and *abstract characters* (e.g. “LATIN CAPITAL LETTER A”).
+    While most abstract characters in Unicode are only represented
+    using one code point, there is a number of abstract characters
+    that can in addition be represented using a sequence of more than
+    one code point. For example, the abstract character “LATIN
+    CAPITAL LETTER C WITH CEDILLA” can be represented as a single
+    *precomposed character* at code position U+00C7, or as a sequence
+    of a *base character* at code position U+0043 (LATIN CAPITAL
+    LETTER C), followed by a *combining character* at code position
+    U+0327 (COMBINING CEDILLA).
+
+    The comparison operators on strings compare at the level of
+    Unicode code points. This may be counter-intuitive to humans. For
+    example, ""\u00C7" == "\u0043\u0327"" is "False", even though both
+    strings represent the same abstract character “LATIN CAPITAL
+    LETTER C WITH CEDILLA”.
+
+    To compare strings at the level of abstract characters (that is,
+    in a way intuitive to humans), use "unicodedata.normalize()".
+
+[4] Due to automatic garbage-collection, free lists, and the dynamic
+    nature of descriptors, you may notice seemingly unusual behaviour
+    in certain uses of the "is" operator, like those involving
+    comparisons between instance methods, or constants. Check their
+    documentation for more info.
+
+[5] The power operator "**" binds less tightly than an arithmetic or
+    bitwise unary operator on its right, that is, "2**-1" is "0.5".
+
+[6] The "%" operator is also used for string formatting; the same
+    precedence applies.
+''',
+ 'pass': r'''The "pass" statement
+********************
+
+   pass_stmt ::= "pass"
+
+"pass" is a null operation — when it is executed, nothing happens. It
+is useful as a placeholder when a statement is required syntactically,
+but no code needs to be executed, for example:
+
+   def f(arg): pass    # a function that does nothing (yet)
+
+   class C: pass       # a class with no methods (yet)
+''',
+ 'power': r'''The power operator
+******************
+
+The power operator binds more tightly than unary operators on its
+left; it binds less tightly than unary operators on its right. The
+syntax is:
+
+   power ::= (await_expr | primary) ["**" u_expr]
+
+Thus, in an unparenthesized sequence of power and unary operators, the
+operators are evaluated from right to left (this does not constrain
+the evaluation order for the operands): "-1**2" results in "-1".
+
+The power operator has the same semantics as the built-in "pow()"
+function, when called with two arguments: it yields its left argument
+raised to the power of its right argument. The numeric arguments are
+first converted to a common type, and the result is of that type.
+
+For int operands, the result has the same type as the operands unless
+the second argument is negative; in that case, all arguments are
+converted to float and a float result is delivered. For example,
+"10**2" returns "100", but "10**-2" returns "0.01".
+
+Raising "0.0" to a negative power results in a "ZeroDivisionError".
+Raising a negative number to a fractional power results in a "complex"
+number. (In earlier versions it raised a "ValueError".)
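+
+A few illustrative examples:
+
+   2**-1        # 0.5: a negative int exponent gives a float
+   -1**2        # -1, because "**" binds more tightly than unary "-"
+   (-1)**2      # 1
+   (-8)**0.5    # a complex number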
+
+This operation can be customized using the special "__pow__()" and
+"__rpow__()" methods.
+''',
+ 'raise': r'''The "raise" statement
+*********************
+
+   raise_stmt ::= "raise" [expression ["from" expression]]
+
+If no expressions are present, "raise" re-raises the exception that is
+currently being handled, which is also known as the *active
+exception*. If there isn’t currently an active exception, a
+"RuntimeError" exception is raised indicating that this is an error.
+
+Otherwise, "raise" evaluates the first expression as the exception
+object. It must be either a subclass or an instance of
+"BaseException". If it is a class, the exception instance will be
+obtained when needed by instantiating the class with no arguments.
+
+The *type* of the exception is the exception instance’s class, the
+*value* is the instance itself.
+
+A traceback object is normally created automatically when an exception
+is raised and attached to it as the "__traceback__" attribute. You can
+create an exception and set your own traceback in one step using the
+"with_traceback()" exception method (which returns the same exception
+instance, with its traceback set to its argument), like so:
+
+   raise Exception("foo occurred").with_traceback(tracebackobj)
+
+The "from" clause is used for exception chaining: if given, the second
+*expression* must be another exception class or instance. If the
+second expression is an exception instance, it will be attached to the
+raised exception as the "__cause__" attribute (which is writable). If
+the expression is an exception class, the class will be instantiated
+and the resulting exception instance will be attached to the raised
+exception as the "__cause__" attribute. If the raised exception is not
+handled, both exceptions will be printed:
+
+   >>> try:
+   ...     print(1 / 0)
+   ... except Exception as exc:
+   ...     raise RuntimeError("Something bad happened") from exc
+   ...
+   Traceback (most recent call last):
+     File "<stdin>", line 2, in <module>
+       print(1 / 0)
+             ~~^~~
+   ZeroDivisionError: division by zero
+
+   The above exception was the direct cause of the following exception:
+
+   Traceback (most recent call last):
+     File "<stdin>", line 4, in <module>
+       raise RuntimeError("Something bad happened") from exc
+   RuntimeError: Something bad happened
+
+A similar mechanism works implicitly if a new exception is raised when
+an exception is already being handled. An exception may be handled
+when an "except" or "finally" clause, or a "with" statement, is used.
+The previous exception is then attached as the new exception’s
+"__context__" attribute:
+
+   >>> try:
+   ...     print(1 / 0)
+   ... except:
+   ...     raise RuntimeError("Something bad happened")
+   ...
+   Traceback (most recent call last):
+     File "<stdin>", line 2, in <module>
+       print(1 / 0)
+             ~~^~~
+   ZeroDivisionError: division by zero
+
+   During handling of the above exception, another exception occurred:
+
+   Traceback (most recent call last):
+     File "<stdin>", line 4, in <module>
+       raise RuntimeError("Something bad happened")
+   RuntimeError: Something bad happened
+
+Exception chaining can be explicitly suppressed by specifying "None"
+in the "from" clause:
+
+   >>> try:
+   ...     print(1 / 0)
+   ... except:
+   ...     raise RuntimeError("Something bad happened") from None
+   ...
+   Traceback (most recent call last):
+     File "<stdin>", line 4, in <module>
+   RuntimeError: Something bad happened
+
+Additional information on exceptions can be found in section
+Exceptions, and information about handling exceptions is in section
+The try statement.
+ +Changed in version 3.3: "None" is now permitted as "Y" in "raise X +from Y".Added the "__suppress_context__" attribute to suppress +automatic display of the exception context. + +Changed in version 3.11: If the traceback of the active exception is +modified in an "except" clause, a subsequent "raise" statement re- +raises the exception with the modified traceback. Previously, the +exception was re-raised with the traceback it had when it was caught. +''', + 'return': r'''The "return" statement +********************** + + return_stmt ::= "return" [expression_list] + +"return" may only occur syntactically nested in a function definition, +not within a nested class definition. + +If an expression list is present, it is evaluated, else "None" is +substituted. + +"return" leaves the current function call with the expression list (or +"None") as return value. + +When "return" passes control out of a "try" statement with a "finally" +clause, that "finally" clause is executed before really leaving the +function. + +In a generator function, the "return" statement indicates that the +generator is done and will cause "StopIteration" to be raised. The +returned value (if any) is used as an argument to construct +"StopIteration" and becomes the "StopIteration.value" attribute. + +In an asynchronous generator function, an empty "return" statement +indicates that the asynchronous generator is done and will cause +"StopAsyncIteration" to be raised. A non-empty "return" statement is +a syntax error in an asynchronous generator function. +''', + 'sequence-types': r'''Emulating container types +************************* + +The following methods can be defined to implement container objects. +None of them are provided by the "object" class itself. Containers +usually are *sequences* (such as "lists" or "tuples") or *mappings* +(like *dictionaries*), but can represent other containers as well. +The first set of methods is used either to emulate a sequence or to +emulate a mapping; the difference is that for a sequence, the +allowable keys should be the integers *k* for which "0 <= k < N" where +*N* is the length of the sequence, or "slice" objects, which define a +range of items. It is also recommended that mappings provide the +methods "keys()", "values()", "items()", "get()", "clear()", +"setdefault()", "pop()", "popitem()", "copy()", and "update()" +behaving similar to those for Python’s standard "dictionary" objects. +The "collections.abc" module provides a "MutableMapping" *abstract +base class* to help create those methods from a base set of +"__getitem__()", "__setitem__()", "__delitem__()", and "keys()". +Mutable sequences should provide methods "append()", "count()", +"index()", "extend()", "insert()", "pop()", "remove()", "reverse()" +and "sort()", like Python standard "list" objects. Finally, sequence +types should implement addition (meaning concatenation) and +multiplication (meaning repetition) by defining the methods +"__add__()", "__radd__()", "__iadd__()", "__mul__()", "__rmul__()" and +"__imul__()" described below; they should not define other numerical +operators. It is recommended that both mappings and sequences +implement the "__contains__()" method to allow efficient use of the +"in" operator; for mappings, "in" should search the mapping’s keys; +for sequences, it should search through the values. 
It is further +recommended that both mappings and sequences implement the +"__iter__()" method to allow efficient iteration through the +container; for mappings, "__iter__()" should iterate through the +object’s keys; for sequences, it should iterate through the values. + +object.__len__(self) + + Called to implement the built-in function "len()". Should return + the length of the object, an integer ">=" 0. Also, an object that + doesn’t define a "__bool__()" method and whose "__len__()" method + returns zero is considered to be false in a Boolean context. + + **CPython implementation detail:** In CPython, the length is + required to be at most "sys.maxsize". If the length is larger than + "sys.maxsize" some features (such as "len()") may raise + "OverflowError". To prevent raising "OverflowError" by truth value + testing, an object must define a "__bool__()" method. + +object.__length_hint__(self) + + Called to implement "operator.length_hint()". Should return an + estimated length for the object (which may be greater or less than + the actual length). The length must be an integer ">=" 0. The + return value may also be "NotImplemented", which is treated the + same as if the "__length_hint__" method didn’t exist at all. This + method is purely an optimization and is never required for + correctness. + + Added in version 3.4. + +Note: + + Slicing is done exclusively with the following three methods. A + call like + + a[1:2] = b + + is translated to + + a[slice(1, 2, None)] = b + + and so forth. Missing slice items are always filled in with "None". + +object.__getitem__(self, key) + + Called to implement evaluation of "self[key]". For *sequence* + types, the accepted keys should be integers. Optionally, they may + support "slice" objects as well. Negative index support is also + optional. If *key* is of an inappropriate type, "TypeError" may be + raised; if *key* is a value outside the set of indexes for the + sequence (after any special interpretation of negative values), + "IndexError" should be raised. For *mapping* types, if *key* is + missing (not in the container), "KeyError" should be raised. + + Note: + + "for" loops expect that an "IndexError" will be raised for + illegal indexes to allow proper detection of the end of the + sequence. + + Note: + + When subscripting a *class*, the special class method + "__class_getitem__()" may be called instead of "__getitem__()". + See __class_getitem__ versus __getitem__ for more details. + +object.__setitem__(self, key, value) + + Called to implement assignment to "self[key]". Same note as for + "__getitem__()". This should only be implemented for mappings if + the objects support changes to the values for keys, or if new keys + can be added, or for sequences if elements can be replaced. The + same exceptions should be raised for improper *key* values as for + the "__getitem__()" method. + +object.__delitem__(self, key) + + Called to implement deletion of "self[key]". Same note as for + "__getitem__()". This should only be implemented for mappings if + the objects support removal of keys, or for sequences if elements + can be removed from the sequence. The same exceptions should be + raised for improper *key* values as for the "__getitem__()" method. + +object.__missing__(self, key) + + Called by "dict"."__getitem__()" to implement "self[key]" for dict + subclasses when key is not in the dictionary. + +object.__iter__(self) + + This method is called when an *iterator* is required for a + container. 
This method should return a new iterator object that can + iterate over all the objects in the container. For mappings, it + should iterate over the keys of the container. + +object.__reversed__(self) + + Called (if present) by the "reversed()" built-in to implement + reverse iteration. It should return a new iterator object that + iterates over all the objects in the container in reverse order. + + If the "__reversed__()" method is not provided, the "reversed()" + built-in will fall back to using the sequence protocol ("__len__()" + and "__getitem__()"). Objects that support the sequence protocol + should only provide "__reversed__()" if they can provide an + implementation that is more efficient than the one provided by + "reversed()". + +The membership test operators ("in" and "not in") are normally +implemented as an iteration through a container. However, container +objects can supply the following special method with a more efficient +implementation, which also does not require the object be iterable. + +object.__contains__(self, item) + + Called to implement membership test operators. Should return true + if *item* is in *self*, false otherwise. For mapping objects, this + should consider the keys of the mapping rather than the values or + the key-item pairs. + + For objects that don’t define "__contains__()", the membership test + first tries iteration via "__iter__()", then the old sequence + iteration protocol via "__getitem__()", see this section in the + language reference. +''', + 'shifting': r'''Shifting operations +******************* + +The shifting operations have lower priority than the arithmetic +operations: + + shift_expr ::= a_expr | shift_expr ("<<" | ">>") a_expr + +These operators accept integers as arguments. They shift the first +argument to the left or right by the number of bits given by the +second argument. + +The left shift operation can be customized using the special +"__lshift__()" and "__rlshift__()" methods. The right shift operation +can be customized using the special "__rshift__()" and "__rrshift__()" +methods. + +A right shift by *n* bits is defined as floor division by "pow(2,n)". +A left shift by *n* bits is defined as multiplication with "pow(2,n)". +''', + 'slicings': r'''Slicings +******** + +A slicing selects a range of items in a sequence object (e.g., a +string, tuple or list). Slicings may be used as expressions or as +targets in assignment or "del" statements. The syntax for a slicing: + + slicing ::= primary "[" slice_list "]" + slice_list ::= slice_item ("," slice_item)* [","] + slice_item ::= expression | proper_slice + proper_slice ::= [lower_bound] ":" [upper_bound] [ ":" [stride] ] + lower_bound ::= expression + upper_bound ::= expression + stride ::= expression + +There is ambiguity in the formal syntax here: anything that looks like +an expression list also looks like a slice list, so any subscription +can be interpreted as a slicing. Rather than further complicating the +syntax, this is disambiguated by defining that in this case the +interpretation as a subscription takes priority over the +interpretation as a slicing (this is the case if the slice list +contains no proper slice). + +The semantics for a slicing are as follows. The primary is indexed +(using the same "__getitem__()" method as normal subscription) with a +key that is constructed from the slice list, as follows. 
If the slice +list contains at least one comma, the key is a tuple containing the +conversion of the slice items; otherwise, the conversion of the lone +slice item is the key. The conversion of a slice item that is an +expression is that expression. The conversion of a proper slice is a +slice object (see section The standard type hierarchy) whose "start", +"stop" and "step" attributes are the values of the expressions given +as lower bound, upper bound and stride, respectively, substituting +"None" for missing expressions. +''', + 'specialattrs': r'''Special Attributes +****************** + +The implementation adds a few special read-only attributes to several +object types, where they are relevant. Some of these are not reported +by the "dir()" built-in function. + +definition.__name__ + + The name of the class, function, method, descriptor, or generator + instance. + +definition.__qualname__ + + The *qualified name* of the class, function, method, descriptor, or + generator instance. + + Added in version 3.3. + +definition.__module__ + + The name of the module in which a class or function was defined. + +definition.__doc__ + + The documentation string of a class or function, or "None" if + undefined. + +definition.__type_params__ + + The type parameters of generic classes, functions, and type + aliases. For classes and functions that are not generic, this will + be an empty tuple. + + Added in version 3.12. +''', + 'specialnames': r'''Special method names +******************** + +A class can implement certain operations that are invoked by special +syntax (such as arithmetic operations or subscripting and slicing) by +defining methods with special names. This is Python’s approach to +*operator overloading*, allowing classes to define their own behavior +with respect to language operators. For instance, if a class defines +a method named "__getitem__()", and "x" is an instance of this class, +then "x[i]" is roughly equivalent to "type(x).__getitem__(x, i)". +Except where mentioned, attempts to execute an operation raise an +exception when no appropriate method is defined (typically +"AttributeError" or "TypeError"). + +Setting a special method to "None" indicates that the corresponding +operation is not available. For example, if a class sets "__iter__()" +to "None", the class is not iterable, so calling "iter()" on its +instances will raise a "TypeError" (without falling back to +"__getitem__()"). [2] + +When implementing a class that emulates any built-in type, it is +important that the emulation only be implemented to the degree that it +makes sense for the object being modelled. For example, some +sequences may work well with retrieval of individual elements, but +extracting a slice may not make sense. (One example of this is the +"NodeList" interface in the W3C’s Document Object Model.) + + +Basic customization +=================== + +object.__new__(cls[, ...]) + + Called to create a new instance of class *cls*. "__new__()" is a + static method (special-cased so you need not declare it as such) + that takes the class of which an instance was requested as its + first argument. The remaining arguments are those passed to the + object constructor expression (the call to the class). The return + value of "__new__()" should be the new object instance (usually an + instance of *cls*). 
+ + Typical implementations create a new instance of the class by + invoking the superclass’s "__new__()" method using + "super().__new__(cls[, ...])" with appropriate arguments and then + modifying the newly created instance as necessary before returning + it. + + If "__new__()" is invoked during object construction and it returns + an instance of *cls*, then the new instance’s "__init__()" method + will be invoked like "__init__(self[, ...])", where *self* is the + new instance and the remaining arguments are the same as were + passed to the object constructor. + + If "__new__()" does not return an instance of *cls*, then the new + instance’s "__init__()" method will not be invoked. + + "__new__()" is intended mainly to allow subclasses of immutable + types (like int, str, or tuple) to customize instance creation. It + is also commonly overridden in custom metaclasses in order to + customize class creation. + +object.__init__(self[, ...]) + + Called after the instance has been created (by "__new__()"), but + before it is returned to the caller. The arguments are those + passed to the class constructor expression. If a base class has an + "__init__()" method, the derived class’s "__init__()" method, if + any, must explicitly call it to ensure proper initialization of the + base class part of the instance; for example: + "super().__init__([args...])". + + Because "__new__()" and "__init__()" work together in constructing + objects ("__new__()" to create it, and "__init__()" to customize + it), no non-"None" value may be returned by "__init__()"; doing so + will cause a "TypeError" to be raised at runtime. + +object.__del__(self) + + Called when the instance is about to be destroyed. This is also + called a finalizer or (improperly) a destructor. If a base class + has a "__del__()" method, the derived class’s "__del__()" method, + if any, must explicitly call it to ensure proper deletion of the + base class part of the instance. + + It is possible (though not recommended!) for the "__del__()" method + to postpone destruction of the instance by creating a new reference + to it. This is called object *resurrection*. It is + implementation-dependent whether "__del__()" is called a second + time when a resurrected object is about to be destroyed; the + current *CPython* implementation only calls it once. + + It is not guaranteed that "__del__()" methods are called for + objects that still exist when the interpreter exits. + "weakref.finalize" provides a straightforward way to register a + cleanup function to be called when an object is garbage collected. + + Note: + + "del x" doesn’t directly call "x.__del__()" — the former + decrements the reference count for "x" by one, and the latter is + only called when "x"’s reference count reaches zero. + + **CPython implementation detail:** It is possible for a reference + cycle to prevent the reference count of an object from going to + zero. In this case, the cycle will be later detected and deleted + by the *cyclic garbage collector*. A common cause of reference + cycles is when an exception has been caught in a local variable. + The frame’s locals then reference the exception, which references + its own traceback, which references the locals of all frames caught + in the traceback. + + See also: Documentation for the "gc" module. + + Warning: + + Due to the precarious circumstances under which "__del__()" + methods are invoked, exceptions that occur during their execution + are ignored, and a warning is printed to "sys.stderr" instead. 
+ In particular: + + * "__del__()" can be invoked when arbitrary code is being + executed, including from any arbitrary thread. If "__del__()" + needs to take a lock or invoke any other blocking resource, it + may deadlock as the resource may already be taken by the code + that gets interrupted to execute "__del__()". + + * "__del__()" can be executed during interpreter shutdown. As a + consequence, the global variables it needs to access (including + other modules) may already have been deleted or set to "None". + Python guarantees that globals whose name begins with a single + underscore are deleted from their module before other globals + are deleted; if no other references to such globals exist, this + may help in assuring that imported modules are still available + at the time when the "__del__()" method is called. + +object.__repr__(self) + + Called by the "repr()" built-in function to compute the “official†+ string representation of an object. If at all possible, this + should look like a valid Python expression that could be used to + recreate an object with the same value (given an appropriate + environment). If this is not possible, a string of the form + "<...some useful description...>" should be returned. The return + value must be a string object. If a class defines "__repr__()" but + not "__str__()", then "__repr__()" is also used when an “informal†+ string representation of instances of that class is required. + + This is typically used for debugging, so it is important that the + representation is information-rich and unambiguous. A default + implementation is provided by the "object" class itself. + +object.__str__(self) + + Called by "str(object)", the default "__format__()" implementation, + and the built-in function "print()", to compute the “informal†or + nicely printable string representation of an object. The return + value must be a str object. + + This method differs from "object.__repr__()" in that there is no + expectation that "__str__()" return a valid Python expression: a + more convenient or concise representation can be used. + + The default implementation defined by the built-in type "object" + calls "object.__repr__()". + +object.__bytes__(self) + + Called by bytes to compute a byte-string representation of an + object. This should return a "bytes" object. The "object" class + itself does not provide this method. + +object.__format__(self, format_spec) + + Called by the "format()" built-in function, and by extension, + evaluation of formatted string literals and the "str.format()" + method, to produce a “formatted†string representation of an + object. The *format_spec* argument is a string that contains a + description of the formatting options desired. The interpretation + of the *format_spec* argument is up to the type implementing + "__format__()", however most classes will either delegate + formatting to one of the built-in types, or use a similar + formatting option syntax. + + See Format Specification Mini-Language for a description of the + standard formatting syntax. + + The return value must be a string object. + + The default implementation by the "object" class should be given an + empty *format_spec* string. It delegates to "__str__()". + + Changed in version 3.4: The __format__ method of "object" itself + raises a "TypeError" if passed any non-empty string. + + Changed in version 3.7: "object.__format__(x, '')" is now + equivalent to "str(x)" rather than "format(str(x), '')". 
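+
+As an illustrative sketch (the "Point" class below is hypothetical,
+not part of the language reference), a class might customize both
+representations like this:
+
+   class Point:
+       def __init__(self, x, y):
+           self.x, self.y = x, y
+
+       def __repr__(self):
+           # Unambiguous, expression-like form, useful for debugging
+           return f'Point(x={self.x!r}, y={self.y!r})'
+
+       def __str__(self):
+           # Informal form used by str() and print()
+           return f'({self.x}, {self.y})'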
+
+object.__lt__(self, other)
+object.__le__(self, other)
+object.__eq__(self, other)
+object.__ne__(self, other)
+object.__gt__(self, other)
+object.__ge__(self, other)
+
+   These are the so-called “rich comparison” methods. The
+   correspondence between operator symbols and method names is as
+   follows: "x<y" calls "x.__lt__(y)", "x<=y" calls "x.__le__(y)",
+   "x==y" calls "x.__eq__(y)", "x!=y" calls "x.__ne__(y)", "x>y" calls
+   "x.__gt__(y)", and "x>=y" calls "x.__ge__(y)".
+
+   A rich comparison method may return the singleton "NotImplemented"
+   if it does not implement the operation for a given pair of
+   arguments. By convention, "False" and "True" are returned for a
+   successful comparison. However, these methods can return any value,
+   so if the comparison operator is used in a Boolean context (e.g.,
+   in the condition of an "if" statement), Python will call "bool()"
+   on the value to determine if the result is true or false.
+
+   By default, "object" implements "__eq__()" by using "is", returning
+   "NotImplemented" in the case of a false comparison: "True if x is y
+   else NotImplemented". For "__ne__()", by default it delegates to
+   "__eq__()" and inverts the result unless it is "NotImplemented".
+   There are no other implied relationships among the comparison
+   operators or default implementations; for example, the truth of
+   "(x<y or x==y)" does not imply "x<=y". To automatically generate
+   ordering operations from a single root operation, see
+   "functools.total_ordering()".
+
+object.__hash__(self)
+
+   Called by built-in function "hash()" and for operations on members
+   of hashed collections including "set", "frozenset", and "dict".
+   The "__hash__()" method should return an integer. The only required
+   property is that objects which compare equal have the same hash
+   value.
+
+   A class that overrides "__eq__()" and does not define "__hash__()"
+   will have its "__hash__()" implicitly set to "None". When the
+   "__hash__()" method of a class is set to "None", instances of the
+   class will raise an appropriate "TypeError" when a program attempts
+   to retrieve their hash value, and will also be correctly identified
+   as unhashable when checking "isinstance(obj,
+   collections.abc.Hashable)".
+
+   If a class that overrides "__eq__()" needs to retain the hash
+   implementation from a parent class, the interpreter must be told
+   this explicitly by setting "__hash__ = <ParentClass>.__hash__".
+
+   If a class that does not override "__eq__()" wishes to suppress
+   hash support, it should include "__hash__ = None" in the class
+   definition. A class which defines its own "__hash__()" that
+   explicitly raises a "TypeError" would be incorrectly identified as
+   hashable by an "isinstance(obj, collections.abc.Hashable)" call.
+
+   Note:
+
+     By default, the "__hash__()" values of str and bytes objects are
+     “salted” with an unpredictable random value. Although they
+     remain constant within an individual Python process, they are not
+     predictable between repeated invocations of Python. This is
+     intended to provide protection against a denial-of-service caused
+     by carefully chosen inputs that exploit the worst case
+     performance of a dict insertion, *O*(*n*^2) complexity. See
+     http://ocert.org/advisories/ocert-2011-003.html for
+     details. Changing hash values affects the iteration order of
+     sets. Python has never made guarantees about this ordering (and
+     it typically varies between 32-bit and 64-bit builds). See also
+     "PYTHONHASHSEED".
+
+   Changed in version 3.3: Hash randomization is enabled by default.
+
+object.__bool__(self)
+
+   Called to implement truth value testing and the built-in operation
+   "bool()"; should return "False" or "True". When this method is not
+   defined, "__len__()" is called, if it is defined, and the object is
+   considered true if its result is nonzero. If a class defines
+   neither "__len__()" nor "__bool__()" (which is true of the "object"
+   class itself), all its instances are considered true.
+
+
+Customizing attribute access
+============================
+
+The following methods can be defined to customize the meaning of
+attribute access (use of, assignment to, or deletion of "x.name") for
+class instances.
+
+object.__getattr__(self, name)
+
+   Called when the default attribute access fails with an
+   "AttributeError" (either "__getattribute__()" raises an
+   "AttributeError" because *name* is not an instance attribute or an
+   attribute in the class tree for "self"; or "__get__()" of a *name*
+   property raises "AttributeError"). This method should either
+   return the (computed) attribute value or raise an "AttributeError"
+   exception. The "object" class itself does not provide this method.
+ + Note that if the attribute is found through the normal mechanism, + "__getattr__()" is not called. (This is an intentional asymmetry + between "__getattr__()" and "__setattr__()".) This is done both for + efficiency reasons and because otherwise "__getattr__()" would have + no way to access other attributes of the instance. Note that at + least for instance variables, you can take total control by not + inserting any values in the instance attribute dictionary (but + instead inserting them in another object). See the + "__getattribute__()" method below for a way to actually get total + control over attribute access. + +object.__getattribute__(self, name) + + Called unconditionally to implement attribute accesses for + instances of the class. If the class also defines "__getattr__()", + the latter will not be called unless "__getattribute__()" either + calls it explicitly or raises an "AttributeError". This method + should return the (computed) attribute value or raise an + "AttributeError" exception. In order to avoid infinite recursion in + this method, its implementation should always call the base class + method with the same name to access any attributes it needs, for + example, "object.__getattribute__(self, name)". + + Note: + + This method may still be bypassed when looking up special methods + as the result of implicit invocation via language syntax or + built-in functions. See Special method lookup. + + For certain sensitive attribute accesses, raises an auditing event + "object.__getattr__" with arguments "obj" and "name". + +object.__setattr__(self, name, value) + + Called when an attribute assignment is attempted. This is called + instead of the normal mechanism (i.e. store the value in the + instance dictionary). *name* is the attribute name, *value* is the + value to be assigned to it. + + If "__setattr__()" wants to assign to an instance attribute, it + should call the base class method with the same name, for example, + "object.__setattr__(self, name, value)". + + For certain sensitive attribute assignments, raises an auditing + event "object.__setattr__" with arguments "obj", "name", "value". + +object.__delattr__(self, name) + + Like "__setattr__()" but for attribute deletion instead of + assignment. This should only be implemented if "del obj.name" is + meaningful for the object. + + For certain sensitive attribute deletions, raises an auditing event + "object.__delattr__" with arguments "obj" and "name". + +object.__dir__(self) + + Called when "dir()" is called on the object. An iterable must be + returned. "dir()" converts the returned iterable to a list and + sorts it. + + +Customizing module attribute access +----------------------------------- + +Special names "__getattr__" and "__dir__" can be also used to +customize access to module attributes. The "__getattr__" function at +the module level should accept one argument which is the name of an +attribute and return the computed value or raise an "AttributeError". +If an attribute is not found on a module object through the normal +lookup, i.e. "object.__getattribute__()", then "__getattr__" is +searched in the module "__dict__" before raising an "AttributeError". +If found, it is called with the attribute name and the result is +returned. + +The "__dir__" function should accept no arguments, and return an +iterable of strings that represents the names accessible on module. If +present, this function overrides the standard "dir()" search on a +module. 
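+
+A brief sketch of these module-level hooks (the module contents and
+the "answer" attribute are purely illustrative):
+
+   # mymodule.py
+   def __getattr__(name):
+       if name == 'answer':
+           return 42
+       raise AttributeError(
+           f'module {__name__!r} has no attribute {name!r}')
+
+   def __dir__():
+       return sorted([*globals(), 'answer'])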
+ +For a more fine grained customization of the module behavior (setting +attributes, properties, etc.), one can set the "__class__" attribute +of a module object to a subclass of "types.ModuleType". For example: + + import sys + from types import ModuleType + + class VerboseModule(ModuleType): + def __repr__(self): + return f'Verbose {self.__name__}' + + def __setattr__(self, attr, value): + print(f'Setting {attr}...') + super().__setattr__(attr, value) + + sys.modules[__name__].__class__ = VerboseModule + +Note: + + Defining module "__getattr__" and setting module "__class__" only + affect lookups made using the attribute access syntax – directly + accessing the module globals (whether by code within the module, or + via a reference to the module’s globals dictionary) is unaffected. + +Changed in version 3.5: "__class__" module attribute is now writable. + +Added in version 3.7: "__getattr__" and "__dir__" module attributes. + +See also: + + **PEP 562** - Module __getattr__ and __dir__ + Describes the "__getattr__" and "__dir__" functions on modules. + + +Implementing Descriptors +------------------------ + +The following methods only apply when an instance of the class +containing the method (a so-called *descriptor* class) appears in an +*owner* class (the descriptor must be in either the owner’s class +dictionary or in the class dictionary for one of its parents). In the +examples below, “the attribute†refers to the attribute whose name is +the key of the property in the owner class’ "__dict__". The "object" +class itself does not implement any of these protocols. + +object.__get__(self, instance, owner=None) + + Called to get the attribute of the owner class (class attribute + access) or of an instance of that class (instance attribute + access). The optional *owner* argument is the owner class, while + *instance* is the instance that the attribute was accessed through, + or "None" when the attribute is accessed through the *owner*. + + This method should return the computed attribute value or raise an + "AttributeError" exception. + + **PEP 252** specifies that "__get__()" is callable with one or two + arguments. Python’s own built-in descriptors support this + specification; however, it is likely that some third-party tools + have descriptors that require both arguments. Python’s own + "__getattribute__()" implementation always passes in both arguments + whether they are required or not. + +object.__set__(self, instance, value) + + Called to set the attribute on an instance *instance* of the owner + class to a new value, *value*. + + Note, adding "__set__()" or "__delete__()" changes the kind of + descriptor to a “data descriptorâ€. See Invoking Descriptors for + more details. + +object.__delete__(self, instance) + + Called to delete the attribute on an instance *instance* of the + owner class. + +Instances of descriptors may also have the "__objclass__" attribute +present: + +object.__objclass__ + + The attribute "__objclass__" is interpreted by the "inspect" module + as specifying the class where this object was defined (setting this + appropriately can assist in runtime introspection of dynamic class + attributes). For callables, it may indicate that an instance of the + given type (or a subclass) is expected or required as the first + positional argument (for example, CPython sets this attribute for + unbound methods that are implemented in C). 
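+
+A minimal data-descriptor sketch (the "Typed" and "Account" names are
+purely illustrative) combining "__get__()" and "__set__()":
+
+   class Typed:
+       def __init__(self, name, kind):
+           self.name, self.kind = name, kind
+
+       def __get__(self, instance, owner=None):
+           if instance is None:
+               return self              # accessed on the owner class
+           return instance.__dict__[self.name]
+
+       def __set__(self, instance, value):
+           if not isinstance(value, self.kind):
+               raise TypeError(f'{self.name} must be {self.kind.__name__}')
+           instance.__dict__[self.name] = value
+
+   class Account:
+       balance = Typed('balance', int)   # instances accept only ints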
+ + +Invoking Descriptors +-------------------- + +In general, a descriptor is an object attribute with “binding +behaviorâ€, one whose attribute access has been overridden by methods +in the descriptor protocol: "__get__()", "__set__()", and +"__delete__()". If any of those methods are defined for an object, it +is said to be a descriptor. + +The default behavior for attribute access is to get, set, or delete +the attribute from an object’s dictionary. For instance, "a.x" has a +lookup chain starting with "a.__dict__['x']", then +"type(a).__dict__['x']", and continuing through the base classes of +"type(a)" excluding metaclasses. + +However, if the looked-up value is an object defining one of the +descriptor methods, then Python may override the default behavior and +invoke the descriptor method instead. Where this occurs in the +precedence chain depends on which descriptor methods were defined and +how they were called. + +The starting point for descriptor invocation is a binding, "a.x". How +the arguments are assembled depends on "a": + +Direct Call + The simplest and least common call is when user code directly + invokes a descriptor method: "x.__get__(a)". + +Instance Binding + If binding to an object instance, "a.x" is transformed into the + call: "type(a).__dict__['x'].__get__(a, type(a))". + +Class Binding + If binding to a class, "A.x" is transformed into the call: + "A.__dict__['x'].__get__(None, A)". + +Super Binding + A dotted lookup such as "super(A, a).x" searches + "a.__class__.__mro__" for a base class "B" following "A" and then + returns "B.__dict__['x'].__get__(a, A)". If not a descriptor, "x" + is returned unchanged. + +For instance bindings, the precedence of descriptor invocation depends +on which descriptor methods are defined. A descriptor can define any +combination of "__get__()", "__set__()" and "__delete__()". If it +does not define "__get__()", then accessing the attribute will return +the descriptor object itself unless there is a value in the object’s +instance dictionary. If the descriptor defines "__set__()" and/or +"__delete__()", it is a data descriptor; if it defines neither, it is +a non-data descriptor. Normally, data descriptors define both +"__get__()" and "__set__()", while non-data descriptors have just the +"__get__()" method. Data descriptors with "__get__()" and "__set__()" +(and/or "__delete__()") defined always override a redefinition in an +instance dictionary. In contrast, non-data descriptors can be +overridden by instances. + +Python methods (including those decorated with "@staticmethod" and +"@classmethod") are implemented as non-data descriptors. Accordingly, +instances can redefine and override methods. This allows individual +instances to acquire behaviors that differ from other instances of the +same class. + +The "property()" function is implemented as a data descriptor. +Accordingly, instances cannot override the behavior of a property. + + +__slots__ +--------- + +*__slots__* allow us to explicitly declare data members (like +properties) and deny the creation of "__dict__" and *__weakref__* +(unless explicitly declared in *__slots__* or available in a parent.) + +The space saved over using "__dict__" can be significant. Attribute +lookup speed can be significantly improved as well. + +object.__slots__ + + This class variable can be assigned a string, iterable, or sequence + of strings with variable names used by instances. 
*__slots__* + reserves space for the declared variables and prevents the + automatic creation of "__dict__" and *__weakref__* for each + instance. + +Notes on using *__slots__*: + +* When inheriting from a class without *__slots__*, the "__dict__" and + *__weakref__* attribute of the instances will always be accessible. + +* Without a "__dict__" variable, instances cannot be assigned new + variables not listed in the *__slots__* definition. Attempts to + assign to an unlisted variable name raises "AttributeError". If + dynamic assignment of new variables is desired, then add + "'__dict__'" to the sequence of strings in the *__slots__* + declaration. + +* Without a *__weakref__* variable for each instance, classes defining + *__slots__* do not support "weak references" to its instances. If + weak reference support is needed, then add "'__weakref__'" to the + sequence of strings in the *__slots__* declaration. + +* *__slots__* are implemented at the class level by creating + descriptors for each variable name. As a result, class attributes + cannot be used to set default values for instance variables defined + by *__slots__*; otherwise, the class attribute would overwrite the + descriptor assignment. + +* The action of a *__slots__* declaration is not limited to the class + where it is defined. *__slots__* declared in parents are available + in child classes. However, instances of a child subclass will get a + "__dict__" and *__weakref__* unless the subclass also defines + *__slots__* (which should only contain names of any *additional* + slots). + +* If a class defines a slot also defined in a base class, the instance + variable defined by the base class slot is inaccessible (except by + retrieving its descriptor directly from the base class). This + renders the meaning of the program undefined. In the future, a + check may be added to prevent this. + +* "TypeError" will be raised if nonempty *__slots__* are defined for a + class derived from a ""variable-length" built-in type" such as + "int", "bytes", and "tuple". + +* Any non-string *iterable* may be assigned to *__slots__*. + +* If a "dictionary" is used to assign *__slots__*, the dictionary keys + will be used as the slot names. The values of the dictionary can be + used to provide per-attribute docstrings that will be recognised by + "inspect.getdoc()" and displayed in the output of "help()". + +* "__class__" assignment works only if both classes have the same + *__slots__*. + +* Multiple inheritance with multiple slotted parent classes can be + used, but only one parent is allowed to have attributes created by + slots (the other bases must have empty slot layouts) - violations + raise "TypeError". + +* If an *iterator* is used for *__slots__* then a *descriptor* is + created for each of the iterator’s values. However, the *__slots__* + attribute will be an empty iterator. + + +Customizing class creation +========================== + +Whenever a class inherits from another class, "__init_subclass__()" is +called on the parent class. This way, it is possible to write classes +which change the behavior of subclasses. This is closely related to +class decorators, but where class decorators only affect the specific +class they’re applied to, "__init_subclass__" solely applies to future +subclasses of the class defining the method. + +classmethod object.__init_subclass__(cls) + + This method is called whenever the containing class is subclassed. + *cls* is then the new subclass. 
If defined as a normal instance + method, this method is implicitly converted to a class method. + + Keyword arguments which are given to a new class are passed to the + parent class’s "__init_subclass__". For compatibility with other + classes using "__init_subclass__", one should take out the needed + keyword arguments and pass the others over to the base class, as + in: + + class Philosopher: + def __init_subclass__(cls, /, default_name, **kwargs): + super().__init_subclass__(**kwargs) + cls.default_name = default_name + + class AustralianPhilosopher(Philosopher, default_name="Bruce"): + pass + + The default implementation "object.__init_subclass__" does nothing, + but raises an error if it is called with any arguments. + + Note: + + The metaclass hint "metaclass" is consumed by the rest of the + type machinery, and is never passed to "__init_subclass__" + implementations. The actual metaclass (rather than the explicit + hint) can be accessed as "type(cls)". + + Added in version 3.6. + +When a class is created, "type.__new__()" scans the class variables +and makes callbacks to those with a "__set_name__()" hook. + +object.__set_name__(self, owner, name) + + Automatically called at the time the owning class *owner* is + created. The object has been assigned to *name* in that class: + + class A: + x = C() # Automatically calls: x.__set_name__(A, 'x') + + If the class variable is assigned after the class is created, + "__set_name__()" will not be called automatically. If needed, + "__set_name__()" can be called directly: + + class A: + pass + + c = C() + A.x = c # The hook is not called + c.__set_name__(A, 'x') # Manually invoke the hook + + See Creating the class object for more details. + + Added in version 3.6. + + +Metaclasses +----------- + +By default, classes are constructed using "type()". The class body is +executed in a new namespace and the class name is bound locally to the +result of "type(name, bases, namespace)". + +The class creation process can be customized by passing the +"metaclass" keyword argument in the class definition line, or by +inheriting from an existing class that included such an argument. In +the following example, both "MyClass" and "MySubclass" are instances +of "Meta": + + class Meta(type): + pass + + class MyClass(metaclass=Meta): + pass + + class MySubclass(MyClass): + pass + +Any other keyword arguments that are specified in the class definition +are passed through to all metaclass operations described below. + +When a class definition is executed, the following steps occur: + +* MRO entries are resolved; + +* the appropriate metaclass is determined; + +* the class namespace is prepared; + +* the class body is executed; + +* the class object is created. + + +Resolving MRO entries +--------------------- + +object.__mro_entries__(self, bases) + + If a base that appears in a class definition is not an instance of + "type", then an "__mro_entries__()" method is searched on the base. + If an "__mro_entries__()" method is found, the base is substituted + with the result of a call to "__mro_entries__()" when creating the + class. The method is called with the original bases tuple passed to + the *bases* parameter, and must return a tuple of classes that will + be used instead of the base. The returned tuple may be empty: in + these cases, the original base is ignored. + +See also: + + "types.resolve_bases()" + Dynamically resolve bases that are not instances of "type". 
+ + "types.get_original_bases()" + Retrieve a class’s “original bases†prior to modifications by + "__mro_entries__()". + + **PEP 560** + Core support for typing module and generic types. + + +Determining the appropriate metaclass +------------------------------------- + +The appropriate metaclass for a class definition is determined as +follows: + +* if no bases and no explicit metaclass are given, then "type()" is + used; + +* if an explicit metaclass is given and it is *not* an instance of + "type()", then it is used directly as the metaclass; + +* if an instance of "type()" is given as the explicit metaclass, or + bases are defined, then the most derived metaclass is used. + +The most derived metaclass is selected from the explicitly specified +metaclass (if any) and the metaclasses (i.e. "type(cls)") of all +specified base classes. The most derived metaclass is one which is a +subtype of *all* of these candidate metaclasses. If none of the +candidate metaclasses meets that criterion, then the class definition +will fail with "TypeError". + + +Preparing the class namespace +----------------------------- + +Once the appropriate metaclass has been identified, then the class +namespace is prepared. If the metaclass has a "__prepare__" attribute, +it is called as "namespace = metaclass.__prepare__(name, bases, +**kwds)" (where the additional keyword arguments, if any, come from +the class definition). The "__prepare__" method should be implemented +as a "classmethod". The namespace returned by "__prepare__" is passed +in to "__new__", but when the final class object is created the +namespace is copied into a new "dict". + +If the metaclass has no "__prepare__" attribute, then the class +namespace is initialised as an empty ordered mapping. + +See also: + + **PEP 3115** - Metaclasses in Python 3000 + Introduced the "__prepare__" namespace hook + + +Executing the class body +------------------------ + +The class body is executed (approximately) as "exec(body, globals(), +namespace)". The key difference from a normal call to "exec()" is that +lexical scoping allows the class body (including any methods) to +reference names from the current and outer scopes when the class +definition occurs inside a function. + +However, even when the class definition occurs inside the function, +methods defined inside the class still cannot see names defined at the +class scope. Class variables must be accessed through the first +parameter of instance or class methods, or through the implicit +lexically scoped "__class__" reference described in the next section. + + +Creating the class object +------------------------- + +Once the class namespace has been populated by executing the class +body, the class object is created by calling "metaclass(name, bases, +namespace, **kwds)" (the additional keywords passed here are the same +as those passed to "__prepare__"). + +This class object is the one that will be referenced by the zero- +argument form of "super()". "__class__" is an implicit closure +reference created by the compiler if any methods in a class body refer +to either "__class__" or "super". This allows the zero argument form +of "super()" to correctly identify the class being defined based on +lexical scoping, while the class or instance that was used to make the +current call is identified based on the first argument passed to the +method. + +**CPython implementation detail:** In CPython 3.6 and later, the +"__class__" cell is passed to the metaclass as a "__classcell__" entry +in the class namespace. 
If present, this must be propagated up to the +"type.__new__" call in order for the class to be initialised +correctly. Failing to do so will result in a "RuntimeError" in Python +3.8. + +When using the default metaclass "type", or any metaclass that +ultimately calls "type.__new__", the following additional +customization steps are invoked after creating the class object: + +1. The "type.__new__" method collects all of the attributes in the + class namespace that define a "__set_name__()" method; + +2. Those "__set_name__" methods are called with the class being + defined and the assigned name of that particular attribute; + +3. The "__init_subclass__()" hook is called on the immediate parent of + the new class in its method resolution order. + +After the class object is created, it is passed to the class +decorators included in the class definition (if any) and the resulting +object is bound in the local namespace as the defined class. + +When a new class is created by "type.__new__", the object provided as +the namespace parameter is copied to a new ordered mapping and the +original object is discarded. The new copy is wrapped in a read-only +proxy, which becomes the "__dict__" attribute of the class object. + +See also: + + **PEP 3135** - New super + Describes the implicit "__class__" closure reference + + +Uses for metaclasses +-------------------- + +The potential uses for metaclasses are boundless. Some ideas that have +been explored include enum, logging, interface checking, automatic +delegation, automatic property creation, proxies, frameworks, and +automatic resource locking/synchronization. + + +Customizing instance and subclass checks +======================================== + +The following methods are used to override the default behavior of the +"isinstance()" and "issubclass()" built-in functions. + +In particular, the metaclass "abc.ABCMeta" implements these methods in +order to allow the addition of Abstract Base Classes (ABCs) as +“virtual base classes†to any class or type (including built-in +types), including other ABCs. + +type.__instancecheck__(self, instance) + + Return true if *instance* should be considered a (direct or + indirect) instance of *class*. If defined, called to implement + "isinstance(instance, class)". + +type.__subclasscheck__(self, subclass) + + Return true if *subclass* should be considered a (direct or + indirect) subclass of *class*. If defined, called to implement + "issubclass(subclass, class)". + +Note that these methods are looked up on the type (metaclass) of a +class. They cannot be defined as class methods in the actual class. +This is consistent with the lookup of special methods that are called +on instances, only in this case the instance is itself a class. + +See also: + + **PEP 3119** - Introducing Abstract Base Classes + Includes the specification for customizing "isinstance()" and + "issubclass()" behavior through "__instancecheck__()" and + "__subclasscheck__()", with motivation for this functionality in + the context of adding Abstract Base Classes (see the "abc" + module) to the language. + + +Emulating generic types +======================= + +When using *type annotations*, it is often useful to *parameterize* a +*generic type* using Python’s square-brackets notation. For example, +the annotation "list[int]" might be used to signify a "list" in which +all the elements are of type "int". 
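+
+For instance (a hypothetical function, shown only to illustrate the
+notation), a parameterized annotation might look like:
+
+   def average(values: list[int]) -> float:
+       return sum(values) / len(values)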
+ +See also: + + **PEP 484** - Type Hints + Introducing Python’s framework for type annotations + + Generic Alias Types + Documentation for objects representing parameterized generic + classes + + Generics, user-defined generics and "typing.Generic" + Documentation on how to implement generic classes that can be + parameterized at runtime and understood by static type-checkers. + +A class can *generally* only be parameterized if it defines the +special class method "__class_getitem__()". + +classmethod object.__class_getitem__(cls, key) + + Return an object representing the specialization of a generic class + by type arguments found in *key*. + + When defined on a class, "__class_getitem__()" is automatically a + class method. As such, there is no need for it to be decorated with + "@classmethod" when it is defined. + + +The purpose of *__class_getitem__* +---------------------------------- + +The purpose of "__class_getitem__()" is to allow runtime +parameterization of standard-library generic classes in order to more +easily apply *type hints* to these classes. + +To implement custom generic classes that can be parameterized at +runtime and understood by static type-checkers, users should either +inherit from a standard library class that already implements +"__class_getitem__()", or inherit from "typing.Generic", which has its +own implementation of "__class_getitem__()". + +Custom implementations of "__class_getitem__()" on classes defined +outside of the standard library may not be understood by third-party +type-checkers such as mypy. Using "__class_getitem__()" on any class +for purposes other than type hinting is discouraged. + + +*__class_getitem__* versus *__getitem__* +---------------------------------------- + +Usually, the subscription of an object using square brackets will call +the "__getitem__()" instance method defined on the object’s class. +However, if the object being subscribed is itself a class, the class +method "__class_getitem__()" may be called instead. +"__class_getitem__()" should return a GenericAlias object if it is +properly defined. + +Presented with the *expression* "obj[x]", the Python interpreter +follows something like the following process to decide whether +"__getitem__()" or "__class_getitem__()" should be called: + + from inspect import isclass + + def subscribe(obj, x): + """Return the result of the expression 'obj[x]'""" + + class_of_obj = type(obj) + + # If the class of obj defines __getitem__, + # call class_of_obj.__getitem__(obj, x) + if hasattr(class_of_obj, '__getitem__'): + return class_of_obj.__getitem__(obj, x) + + # Else, if obj is a class and defines __class_getitem__, + # call obj.__class_getitem__(x) + elif isclass(obj) and hasattr(obj, '__class_getitem__'): + return obj.__class_getitem__(x) + + # Else, raise an exception + else: + raise TypeError( + f"'{class_of_obj.__name__}' object is not subscriptable" + ) + +In Python, all classes are themselves instances of other classes. The +class of a class is known as that class’s *metaclass*, and most +classes have the "type" class as their metaclass. 
"type" does not +define "__getitem__()", meaning that expressions such as "list[int]", +"dict[str, float]" and "tuple[str, bytes]" all result in +"__class_getitem__()" being called: + + >>> # list has class "type" as its metaclass, like most classes: + >>> type(list) + + >>> type(dict) == type(list) == type(tuple) == type(str) == type(bytes) + True + >>> # "list[int]" calls "list.__class_getitem__(int)" + >>> list[int] + list[int] + >>> # list.__class_getitem__ returns a GenericAlias object: + >>> type(list[int]) + + +However, if a class has a custom metaclass that defines +"__getitem__()", subscribing the class may result in different +behaviour. An example of this can be found in the "enum" module: + + >>> from enum import Enum + >>> class Menu(Enum): + ... """A breakfast menu""" + ... SPAM = 'spam' + ... BACON = 'bacon' + ... + >>> # Enum classes have a custom metaclass: + >>> type(Menu) + + >>> # EnumMeta defines __getitem__, + >>> # so __class_getitem__ is not called, + >>> # and the result is not a GenericAlias object: + >>> Menu['SPAM'] + + >>> type(Menu['SPAM']) + + +See also: + + **PEP 560** - Core Support for typing module and generic types + Introducing "__class_getitem__()", and outlining when a + subscription results in "__class_getitem__()" being called + instead of "__getitem__()" + + +Emulating callable objects +========================== + +object.__call__(self[, args...]) + + Called when the instance is “called†as a function; if this method + is defined, "x(arg1, arg2, ...)" roughly translates to + "type(x).__call__(x, arg1, ...)". The "object" class itself does + not provide this method. + + +Emulating container types +========================= + +The following methods can be defined to implement container objects. +None of them are provided by the "object" class itself. Containers +usually are *sequences* (such as "lists" or "tuples") or *mappings* +(like *dictionaries*), but can represent other containers as well. +The first set of methods is used either to emulate a sequence or to +emulate a mapping; the difference is that for a sequence, the +allowable keys should be the integers *k* for which "0 <= k < N" where +*N* is the length of the sequence, or "slice" objects, which define a +range of items. It is also recommended that mappings provide the +methods "keys()", "values()", "items()", "get()", "clear()", +"setdefault()", "pop()", "popitem()", "copy()", and "update()" +behaving similar to those for Python’s standard "dictionary" objects. +The "collections.abc" module provides a "MutableMapping" *abstract +base class* to help create those methods from a base set of +"__getitem__()", "__setitem__()", "__delitem__()", and "keys()". +Mutable sequences should provide methods "append()", "count()", +"index()", "extend()", "insert()", "pop()", "remove()", "reverse()" +and "sort()", like Python standard "list" objects. Finally, sequence +types should implement addition (meaning concatenation) and +multiplication (meaning repetition) by defining the methods +"__add__()", "__radd__()", "__iadd__()", "__mul__()", "__rmul__()" and +"__imul__()" described below; they should not define other numerical +operators. It is recommended that both mappings and sequences +implement the "__contains__()" method to allow efficient use of the +"in" operator; for mappings, "in" should search the mapping’s keys; +for sequences, it should search through the values. 
It is further +recommended that both mappings and sequences implement the +"__iter__()" method to allow efficient iteration through the +container; for mappings, "__iter__()" should iterate through the +object’s keys; for sequences, it should iterate through the values. + +object.__len__(self) + + Called to implement the built-in function "len()". Should return + the length of the object, an integer ">=" 0. Also, an object that + doesn’t define a "__bool__()" method and whose "__len__()" method + returns zero is considered to be false in a Boolean context. + + **CPython implementation detail:** In CPython, the length is + required to be at most "sys.maxsize". If the length is larger than + "sys.maxsize" some features (such as "len()") may raise + "OverflowError". To prevent raising "OverflowError" by truth value + testing, an object must define a "__bool__()" method. + +object.__length_hint__(self) + + Called to implement "operator.length_hint()". Should return an + estimated length for the object (which may be greater or less than + the actual length). The length must be an integer ">=" 0. The + return value may also be "NotImplemented", which is treated the + same as if the "__length_hint__" method didn’t exist at all. This + method is purely an optimization and is never required for + correctness. + + Added in version 3.4. + +Note: + + Slicing is done exclusively with the following three methods. A + call like + + a[1:2] = b + + is translated to + + a[slice(1, 2, None)] = b + + and so forth. Missing slice items are always filled in with "None". + +object.__getitem__(self, key) + + Called to implement evaluation of "self[key]". For *sequence* + types, the accepted keys should be integers. Optionally, they may + support "slice" objects as well. Negative index support is also + optional. If *key* is of an inappropriate type, "TypeError" may be + raised; if *key* is a value outside the set of indexes for the + sequence (after any special interpretation of negative values), + "IndexError" should be raised. For *mapping* types, if *key* is + missing (not in the container), "KeyError" should be raised. + + Note: + + "for" loops expect that an "IndexError" will be raised for + illegal indexes to allow proper detection of the end of the + sequence. + + Note: + + When subscripting a *class*, the special class method + "__class_getitem__()" may be called instead of "__getitem__()". + See __class_getitem__ versus __getitem__ for more details. + +object.__setitem__(self, key, value) + + Called to implement assignment to "self[key]". Same note as for + "__getitem__()". This should only be implemented for mappings if + the objects support changes to the values for keys, or if new keys + can be added, or for sequences if elements can be replaced. The + same exceptions should be raised for improper *key* values as for + the "__getitem__()" method. + +object.__delitem__(self, key) + + Called to implement deletion of "self[key]". Same note as for + "__getitem__()". This should only be implemented for mappings if + the objects support removal of keys, or for sequences if elements + can be removed from the sequence. The same exceptions should be + raised for improper *key* values as for the "__getitem__()" method. + +object.__missing__(self, key) + + Called by "dict"."__getitem__()" to implement "self[key]" for dict + subclasses when key is not in the dictionary. + +object.__iter__(self) + + This method is called when an *iterator* is required for a + container. 
This method should return a new iterator object that can + iterate over all the objects in the container. For mappings, it + should iterate over the keys of the container. + +object.__reversed__(self) + + Called (if present) by the "reversed()" built-in to implement + reverse iteration. It should return a new iterator object that + iterates over all the objects in the container in reverse order. + + If the "__reversed__()" method is not provided, the "reversed()" + built-in will fall back to using the sequence protocol ("__len__()" + and "__getitem__()"). Objects that support the sequence protocol + should only provide "__reversed__()" if they can provide an + implementation that is more efficient than the one provided by + "reversed()". + +The membership test operators ("in" and "not in") are normally +implemented as an iteration through a container. However, container +objects can supply the following special method with a more efficient +implementation, which also does not require the object be iterable. + +object.__contains__(self, item) + + Called to implement membership test operators. Should return true + if *item* is in *self*, false otherwise. For mapping objects, this + should consider the keys of the mapping rather than the values or + the key-item pairs. + + For objects that don’t define "__contains__()", the membership test + first tries iteration via "__iter__()", then the old sequence + iteration protocol via "__getitem__()", see this section in the + language reference. + + +Emulating numeric types +======================= + +The following methods can be defined to emulate numeric objects. +Methods corresponding to operations that are not supported by the +particular kind of number implemented (e.g., bitwise operations for +non-integral numbers) should be left undefined. + +object.__add__(self, other) +object.__sub__(self, other) +object.__mul__(self, other) +object.__matmul__(self, other) +object.__truediv__(self, other) +object.__floordiv__(self, other) +object.__mod__(self, other) +object.__divmod__(self, other) +object.__pow__(self, other[, modulo]) +object.__lshift__(self, other) +object.__rshift__(self, other) +object.__and__(self, other) +object.__xor__(self, other) +object.__or__(self, other) + + These methods are called to implement the binary arithmetic + operations ("+", "-", "*", "@", "/", "//", "%", "divmod()", + "pow()", "**", "<<", ">>", "&", "^", "|"). For instance, to + evaluate the expression "x + y", where *x* is an instance of a + class that has an "__add__()" method, "type(x).__add__(x, y)" is + called. The "__divmod__()" method should be the equivalent to + using "__floordiv__()" and "__mod__()"; it should not be related to + "__truediv__()". Note that "__pow__()" should be defined to accept + an optional third argument if the ternary version of the built-in + "pow()" function is to be supported. + + If one of those methods does not support the operation with the + supplied arguments, it should return "NotImplemented". 
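+
+A short sketch (the "Meters" class is illustrative only) of an
+"__add__()" that returns "NotImplemented" for operands it does not
+handle, so that Python can try the reflected method instead:
+
+   class Meters:
+       def __init__(self, value):
+           self.value = value
+
+       def __add__(self, other):
+           if isinstance(other, Meters):
+               return Meters(self.value + other.value)
+           return NotImplemented   # let the other operand participate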
+ +object.__radd__(self, other) +object.__rsub__(self, other) +object.__rmul__(self, other) +object.__rmatmul__(self, other) +object.__rtruediv__(self, other) +object.__rfloordiv__(self, other) +object.__rmod__(self, other) +object.__rdivmod__(self, other) +object.__rpow__(self, other[, modulo]) +object.__rlshift__(self, other) +object.__rrshift__(self, other) +object.__rand__(self, other) +object.__rxor__(self, other) +object.__ror__(self, other) + + These methods are called to implement the binary arithmetic + operations ("+", "-", "*", "@", "/", "//", "%", "divmod()", + "pow()", "**", "<<", ">>", "&", "^", "|") with reflected (swapped) + operands. These functions are only called if the operands are of + different types, when the left operand does not support the + corresponding operation [3], or the right operand’s class is + derived from the left operand’s class. [4] For instance, to + evaluate the expression "x - y", where *y* is an instance of a + class that has an "__rsub__()" method, "type(y).__rsub__(y, x)" is + called if "type(x).__sub__(x, y)" returns "NotImplemented" or + "type(y)" is a subclass of "type(x)". [5] + + Note that ternary "pow()" will not try calling "__rpow__()" (the + coercion rules would become too complicated). + + Note: + + If the right operand’s type is a subclass of the left operand’s + type and that subclass provides a different implementation of the + reflected method for the operation, this method will be called + before the left operand’s non-reflected method. This behavior + allows subclasses to override their ancestors’ operations. + +object.__iadd__(self, other) +object.__isub__(self, other) +object.__imul__(self, other) +object.__imatmul__(self, other) +object.__itruediv__(self, other) +object.__ifloordiv__(self, other) +object.__imod__(self, other) +object.__ipow__(self, other[, modulo]) +object.__ilshift__(self, other) +object.__irshift__(self, other) +object.__iand__(self, other) +object.__ixor__(self, other) +object.__ior__(self, other) + + These methods are called to implement the augmented arithmetic + assignments ("+=", "-=", "*=", "@=", "/=", "//=", "%=", "**=", + "<<=", ">>=", "&=", "^=", "|="). These methods should attempt to + do the operation in-place (modifying *self*) and return the result + (which could be, but does not have to be, *self*). If a specific + method is not defined, or if that method returns "NotImplemented", + the augmented assignment falls back to the normal methods. For + instance, if *x* is an instance of a class with an "__iadd__()" + method, "x += y" is equivalent to "x = x.__iadd__(y)" . If + "__iadd__()" does not exist, or if "x.__iadd__(y)" returns + "NotImplemented", "x.__add__(y)" and "y.__radd__(x)" are + considered, as with the evaluation of "x + y". In certain + situations, augmented assignment can result in unexpected errors + (see Why does a_tuple[i] += [‘item’] raise an exception when the + addition works?), but this behavior is in fact part of the data + model. + +object.__neg__(self) +object.__pos__(self) +object.__abs__(self) +object.__invert__(self) + + Called to implement the unary arithmetic operations ("-", "+", + "abs()" and "~"). + +object.__complex__(self) +object.__int__(self) +object.__float__(self) + + Called to implement the built-in functions "complex()", "int()" and + "float()". Should return a value of the appropriate type. 
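+
+For example, a hypothetical "Celsius" class could support these
+conversions as follows:
+
+   class Celsius:
+       def __init__(self, degrees):
+           self.degrees = degrees
+
+       def __float__(self):
+           return float(self.degrees)   # float(Celsius(21.5)) == 21.5
+
+       def __int__(self):
+           return int(self.degrees)     # int(Celsius(21.5)) == 21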
+ +object.__index__(self) + + Called to implement "operator.index()", and whenever Python needs + to losslessly convert the numeric object to an integer object (such + as in slicing, or in the built-in "bin()", "hex()" and "oct()" + functions). Presence of this method indicates that the numeric + object is an integer type. Must return an integer. + + If "__int__()", "__float__()" and "__complex__()" are not defined + then corresponding built-in functions "int()", "float()" and + "complex()" fall back to "__index__()". + +object.__round__(self[, ndigits]) +object.__trunc__(self) +object.__floor__(self) +object.__ceil__(self) + + Called to implement the built-in function "round()" and "math" + functions "trunc()", "floor()" and "ceil()". Unless *ndigits* is + passed to "__round__()" all these methods should return the value + of the object truncated to an "Integral" (typically an "int"). + + Changed in version 3.14: "int()" no longer delegates to the + "__trunc__()" method. + + +With Statement Context Managers +=============================== + +A *context manager* is an object that defines the runtime context to +be established when executing a "with" statement. The context manager +handles the entry into, and the exit from, the desired runtime context +for the execution of the block of code. Context managers are normally +invoked using the "with" statement (described in section The with +statement), but can also be used by directly invoking their methods. + +Typical uses of context managers include saving and restoring various +kinds of global state, locking and unlocking resources, closing opened +files, etc. + +For more information on context managers, see Context Manager Types. +The "object" class itself does not provide the context manager +methods. + +object.__enter__(self) + + Enter the runtime context related to this object. The "with" + statement will bind this method’s return value to the target(s) + specified in the "as" clause of the statement, if any. + +object.__exit__(self, exc_type, exc_value, traceback) + + Exit the runtime context related to this object. The parameters + describe the exception that caused the context to be exited. If the + context was exited without an exception, all three arguments will + be "None". + + If an exception is supplied, and the method wishes to suppress the + exception (i.e., prevent it from being propagated), it should + return a true value. Otherwise, the exception will be processed + normally upon exit from this method. + + Note that "__exit__()" methods should not reraise the passed-in + exception; this is the caller’s responsibility. + +See also: + + **PEP 343** - The “with†statement + The specification, background, and examples for the Python "with" + statement. + + +Customizing positional arguments in class pattern matching +========================================================== + +When using a class name in a pattern, positional arguments in the +pattern are not allowed by default, i.e. "case MyClass(x, y)" is +typically invalid without special support in "MyClass". To be able to +use that kind of pattern, the class needs to define a *__match_args__* +attribute. + +object.__match_args__ + + This class variable can be assigned a tuple of strings. When this + class is used in a class pattern with positional arguments, each + positional argument will be converted into a keyword argument, + using the corresponding value in *__match_args__* as the keyword. + The absence of this attribute is equivalent to setting it to "()". 
+ +For example, if "MyClass.__match_args__" is "("left", "center", +"right")" that means that "case MyClass(x, y)" is equivalent to "case +MyClass(left=x, center=y)". Note that the number of arguments in the +pattern must be smaller than or equal to the number of elements in +*__match_args__*; if it is larger, the pattern match attempt will +raise a "TypeError". + +Added in version 3.10. + +See also: + + **PEP 634** - Structural Pattern Matching + The specification for the Python "match" statement. + + +Emulating buffer types +====================== + +The buffer protocol provides a way for Python objects to expose +efficient access to a low-level memory array. This protocol is +implemented by builtin types such as "bytes" and "memoryview", and +third-party libraries may define additional buffer types. + +While buffer types are usually implemented in C, it is also possible +to implement the protocol in Python. + +object.__buffer__(self, flags) + + Called when a buffer is requested from *self* (for example, by the + "memoryview" constructor). The *flags* argument is an integer + representing the kind of buffer requested, affecting for example + whether the returned buffer is read-only or writable. + "inspect.BufferFlags" provides a convenient way to interpret the + flags. The method must return a "memoryview" object. + +object.__release_buffer__(self, buffer) + + Called when a buffer is no longer needed. The *buffer* argument is + a "memoryview" object that was previously returned by + "__buffer__()". The method must release any resources associated + with the buffer. This method should return "None". Buffer objects + that do not need to perform any cleanup are not required to + implement this method. + +Added in version 3.12. + +See also: + + **PEP 688** - Making the buffer protocol accessible in Python + Introduces the Python "__buffer__" and "__release_buffer__" + methods. + + "collections.abc.Buffer" + ABC for buffer types. + + +Annotations +=========== + +Functions, classes, and modules may contain *annotations*, which are a +way to associate information (usually *type hints*) with a symbol. + +object.__annotations__ + + This attribute contains the annotations for an object. It is lazily + evaluated, so accessing the attribute may execute arbitrary code + and raise exceptions. If evaluation is successful, the attribute is + set to a dictionary mapping from variable names to annotations. + + Changed in version 3.14: Annotations are now lazily evaluated. + +object.__annotate__(format) + + An *annotate function*. Returns a new dictionary object mapping + attribute/parameter names to their annotation values. + + Takes a format parameter specifying the format in which annotations + values should be provided. It must be a member of the + "annotationlib.Format" enum, or an integer with a value + corresponding to a member of the enum. + + If an annotate function doesn’t support the requested format, it + must raise "NotImplementedError". Annotate functions must always + support "VALUE" format; they must not raise "NotImplementedError()" + when called with this format. + + When called with "VALUE" format, an annotate function may raise + "NameError"; it must not raise "NameError" when called requesting + any other format. + + If an object does not have any annotations, "__annotate__" should + preferably be set to "None" (it can’t be deleted), rather than set + to a function that returns an empty dict. + + Added in version 3.14. 
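+
+A small illustration (the function is hypothetical) of reading the
+annotations mapping:
+
+   def greet(name: str) -> str:
+       return 'Hello, ' + name
+
+   # Accessing __annotations__ evaluates the annotations if necessary
+   # and returns a dict such as {'name': str, 'return': str}.
+   print(greet.__annotations__)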
+
+See also:
+
+   **PEP 649** — Deferred evaluation of annotation using descriptors
+      Introduces lazy evaluation of annotations and the "__annotate__"
+      function.
+
+
+Special method lookup
+=====================
+
+For custom classes, implicit invocations of special methods are only
+guaranteed to work correctly if defined on an object’s type, not in
+the object’s instance dictionary. That behaviour is the reason why
+the following code raises an exception:
+
+   >>> class C:
+   ...     pass
+   ...
+   >>> c = C()
+   >>> c.__len__ = lambda: 5
+   >>> len(c)
+   Traceback (most recent call last):
+     File "<stdin>", line 1, in <module>
+   TypeError: object of type 'C' has no len()
+
+The rationale behind this behaviour lies with a number of special
+methods such as "__hash__()" and "__repr__()" that are implemented by
+all objects, including type objects. If the implicit lookup of these
+methods used the conventional lookup process, they would fail when
+invoked on the type object itself:
+
+   >>> 1 .__hash__() == hash(1)
+   True
+   >>> int.__hash__() == hash(int)
+   Traceback (most recent call last):
+     File "<stdin>", line 1, in <module>
+   TypeError: descriptor '__hash__' of 'int' object needs an argument
+
+Incorrectly attempting to invoke an unbound method of a class in this
+way is sometimes referred to as ‘metaclass confusion’, and is avoided
+by bypassing the instance when looking up special methods:
+
+   >>> type(1).__hash__(1) == hash(1)
+   True
+   >>> type(int).__hash__(int) == hash(int)
+   True
+
+In addition to bypassing any instance attributes in the interest of
+correctness, implicit special method lookup generally also bypasses
+the "__getattribute__()" method even of the object’s metaclass:
+
+   >>> class Meta(type):
+   ...     def __getattribute__(*args):
+   ...         print("Metaclass getattribute invoked")
+   ...         return type.__getattribute__(*args)
+   ...
+   >>> class C(object, metaclass=Meta):
+   ...     def __len__(self):
+   ...         return 10
+   ...     def __getattribute__(*args):
+   ...         print("Class getattribute invoked")
+   ...         return object.__getattribute__(*args)
+   ...
+   >>> c = C()
+   >>> c.__len__()                 # Explicit lookup via instance
+   Class getattribute invoked
+   10
+   >>> type(c).__len__(c)          # Explicit lookup via type
+   Metaclass getattribute invoked
+   10
+   >>> len(c)                      # Implicit lookup
+   10
+
+Bypassing the "__getattribute__()" machinery in this fashion provides
+significant scope for speed optimisations within the interpreter, at
+the cost of some flexibility in the handling of special methods (the
+special method *must* be set on the class object itself in order to be
+consistently invoked by the interpreter).
+''',
+ 'string-methods': r'''String Methods
+**************
+
+Strings implement all of the common sequence operations, along with
+the additional methods described below.
+
+Strings also support two styles of string formatting, one providing a
+large degree of flexibility and customization (see "str.format()",
+Format String Syntax and Custom String Formatting) and the other based
+on C "printf" style formatting that handles a narrower range of types
+and is slightly harder to use correctly, but is often faster for the
+cases it can handle (printf-style String Formatting).
+
+The Text Processing Services section of the standard library covers a
+number of other modules that provide various text related utilities
+(including regular expression support in the "re" module).
+
+str.capitalize()
+
+   Return a copy of the string with its first character capitalized
+   and the rest lowercased.
+ + Changed in version 3.8: The first character is now put into + titlecase rather than uppercase. This means that characters like + digraphs will only have their first letter capitalized, instead of + the full character. + +str.casefold() + + Return a casefolded copy of the string. Casefolded strings may be + used for caseless matching. + + Casefolding is similar to lowercasing but more aggressive because + it is intended to remove all case distinctions in a string. For + example, the German lowercase letter "'ß'" is equivalent to ""ss"". + Since it is already lowercase, "lower()" would do nothing to "'ß'"; + "casefold()" converts it to ""ss"". + + The casefolding algorithm is described in section 3.13 ‘Default + Case Folding’ of the Unicode Standard. + + Added in version 3.3. + +str.center(width[, fillchar]) + + Return centered in a string of length *width*. Padding is done + using the specified *fillchar* (default is an ASCII space). The + original string is returned if *width* is less than or equal to + "len(s)". + +str.count(sub[, start[, end]]) + + Return the number of non-overlapping occurrences of substring *sub* + in the range [*start*, *end*]. Optional arguments *start* and + *end* are interpreted as in slice notation. + + If *sub* is empty, returns the number of empty strings between + characters which is the length of the string plus one. + +str.encode(encoding='utf-8', errors='strict') + + Return the string encoded to "bytes". + + *encoding* defaults to "'utf-8'"; see Standard Encodings for + possible values. + + *errors* controls how encoding errors are handled. If "'strict'" + (the default), a "UnicodeError" exception is raised. Other possible + values are "'ignore'", "'replace'", "'xmlcharrefreplace'", + "'backslashreplace'" and any other name registered via + "codecs.register_error()". See Error Handlers for details. + + For performance reasons, the value of *errors* is not checked for + validity unless an encoding error actually occurs, Python + Development Mode is enabled or a debug build is used. + + Changed in version 3.1: Added support for keyword arguments. + + Changed in version 3.9: The value of the *errors* argument is now + checked in Python Development Mode and in debug mode. + +str.endswith(suffix[, start[, end]]) + + Return "True" if the string ends with the specified *suffix*, + otherwise return "False". *suffix* can also be a tuple of suffixes + to look for. With optional *start*, test beginning at that + position. With optional *end*, stop comparing at that position. + +str.expandtabs(tabsize=8) + + Return a copy of the string where all tab characters are replaced + by one or more spaces, depending on the current column and the + given tab size. Tab positions occur every *tabsize* characters + (default is 8, giving tab positions at columns 0, 8, 16 and so on). + To expand the string, the current column is set to zero and the + string is examined character by character. If the character is a + tab ("\t"), one or more space characters are inserted in the result + until the current column is equal to the next tab position. (The + tab character itself is not copied.) If the character is a newline + ("\n") or return ("\r"), it is copied and the current column is + reset to zero. Any other character is copied unchanged and the + current column is incremented by one regardless of how the + character is represented when printed. 
+
+      >>> '01\t012\t0123\t01234'.expandtabs()
+      '01      012     0123    01234'
+      >>> '01\t012\t0123\t01234'.expandtabs(4)
+      '01  012 0123 01234'
+
+str.find(sub[, start[, end]])
+
+   Return the lowest index in the string where substring *sub* is
+   found within the slice "s[start:end]". Optional arguments *start*
+   and *end* are interpreted as in slice notation. Return "-1" if
+   *sub* is not found.
+
+   Note:
+
+     The "find()" method should be used only if you need to know the
+     position of *sub*. To check if *sub* is a substring or not, use
+     the "in" operator:
+
+        >>> 'Py' in 'Python'
+        True
+
+str.format(*args, **kwargs)
+
+   Perform a string formatting operation. The string on which this
+   method is called can contain literal text or replacement fields
+   delimited by braces "{}". Each replacement field contains either
+   the numeric index of a positional argument, or the name of a
+   keyword argument. Returns a copy of the string where each
+   replacement field is replaced with the string value of the
+   corresponding argument.
+
+      >>> "The sum of 1 + 2 is {0}".format(1+2)
+      'The sum of 1 + 2 is 3'
+
+   See Format String Syntax for a description of the various
+   formatting options that can be specified in format strings.
+
+   Note:
+
+     When formatting a number ("int", "float", "complex",
+     "decimal.Decimal" and subclasses) with the "n" type (ex:
+     "'{:n}'.format(1234)"), the function temporarily sets the
+     "LC_CTYPE" locale to the "LC_NUMERIC" locale to decode
+     "decimal_point" and "thousands_sep" fields of "localeconv()" if
+     they are non-ASCII or longer than 1 byte, and the "LC_NUMERIC"
+     locale is different than the "LC_CTYPE" locale. This temporary
+     change affects other threads.
+
+   Changed in version 3.7: When formatting a number with the "n" type,
+   the function temporarily sets the "LC_CTYPE" locale to the
+   "LC_NUMERIC" locale in some cases.
+
+str.format_map(mapping, /)
+
+   Similar to "str.format(**mapping)", except that "mapping" is used
+   directly and not copied to a "dict". This is useful if, for
+   example, "mapping" is a dict subclass:
+
+      >>> class Default(dict):
+      ...     def __missing__(self, key):
+      ...         return key
+      ...
+      >>> '{name} was born in {country}'.format_map(Default(name='Guido'))
+      'Guido was born in country'
+
+   Added in version 3.2.
+
+str.index(sub[, start[, end]])
+
+   Like "find()", but raise "ValueError" when the substring is not
+   found.
+
+str.isalnum()
+
+   Return "True" if all characters in the string are alphanumeric and
+   there is at least one character, "False" otherwise. A character
+   "c" is alphanumeric if one of the following returns "True":
+   "c.isalpha()", "c.isdecimal()", "c.isdigit()", or "c.isnumeric()".
+
+str.isalpha()
+
+   Return "True" if all characters in the string are alphabetic and
+   there is at least one character, "False" otherwise. Alphabetic
+   characters are those characters defined in the Unicode character
+   database as “Letter”, i.e., those with general category property
+   being one of “Lm”, “Lt”, “Lu”, “Ll”, or “Lo”. Note that this is
+   different from the Alphabetic property defined in the section 4.10
+   ‘Letters, Alphabetic, and Ideographic’ of the Unicode Standard.
+
+str.isascii()
+
+   Return "True" if the string is empty or all characters in the
+   string are ASCII, "False" otherwise. ASCII characters have code
+   points in the range U+0000-U+007F.
+
+   Added in version 3.7.
+
+str.isdecimal()
+
+   Return "True" if all characters in the string are decimal
+   characters and there is at least one character, "False" otherwise.
+   Decimal characters are those that can be used to form numbers in
+   base 10, e.g. U+0660, ARABIC-INDIC DIGIT ZERO. Formally a decimal
+   character is a character in the Unicode General Category “Nd”.
+
+str.isdigit()
+
+   Return "True" if all characters in the string are digits and there
+   is at least one character, "False" otherwise. Digits include
+   decimal characters and digits that need special handling, such as
+   the compatibility superscript digits. This covers digits which
+   cannot be used to form numbers in base 10, like the Kharosthi
+   numbers. Formally, a digit is a character that has the property
+   value Numeric_Type=Digit or Numeric_Type=Decimal.
+
+str.isidentifier()
+
+   Return "True" if the string is a valid identifier according to the
+   language definition, section Identifiers and keywords.
+
+   "keyword.iskeyword()" can be used to test whether string "s" is a
+   reserved identifier, such as "def" and "class".
+
+   Example:
+
+      >>> from keyword import iskeyword
+
+      >>> 'hello'.isidentifier(), iskeyword('hello')
+      (True, False)
+      >>> 'def'.isidentifier(), iskeyword('def')
+      (True, True)
+
+str.islower()
+
+   Return "True" if all cased characters [4] in the string are
+   lowercase and there is at least one cased character, "False"
+   otherwise.
+
+str.isnumeric()
+
+   Return "True" if all characters in the string are numeric
+   characters, and there is at least one character, "False" otherwise.
+   Numeric characters include digit characters, and all characters
+   that have the Unicode numeric value property, e.g. U+2155, VULGAR
+   FRACTION ONE FIFTH. Formally, numeric characters are those with
+   the property value Numeric_Type=Digit, Numeric_Type=Decimal or
+   Numeric_Type=Numeric.
+
+str.isprintable()
+
+   Return "True" if all characters in the string are printable or the
+   string is empty, "False" otherwise. Nonprintable characters are
+   those characters defined in the Unicode character database as
+   “Other” or “Separator”, excepting the ASCII space (0x20) which is
+   considered printable. (Note that printable characters in this
+   context are those which should not be escaped when "repr()" is
+   invoked on a string. It has no bearing on the handling of strings
+   written to "sys.stdout" or "sys.stderr".)
+
+str.isspace()
+
+   Return "True" if there are only whitespace characters in the string
+   and there is at least one character, "False" otherwise.
+
+   A character is *whitespace* if in the Unicode character database
+   (see "unicodedata"), either its general category is "Zs"
+   (“Separator, space”), or its bidirectional class is one of "WS",
+   "B", or "S".
+
+str.istitle()
+
+   Return "True" if the string is a titlecased string and there is at
+   least one character, for example uppercase characters may only
+   follow uncased characters and lowercase characters only cased ones.
+   Return "False" otherwise.
+
+str.isupper()
+
+   Return "True" if all cased characters [4] in the string are
+   uppercase and there is at least one cased character, "False"
+   otherwise.
+
+      >>> 'BANANA'.isupper()
+      True
+      >>> 'banana'.isupper()
+      False
+      >>> 'baNana'.isupper()
+      False
+      >>> ' '.isupper()
+      False
+
+str.join(iterable)
+
+   Return a string which is the concatenation of the strings in
+   *iterable*. A "TypeError" will be raised if there are any non-
+   string values in *iterable*, including "bytes" objects. The
+   separator between elements is the string providing this method.
+
+str.ljust(width[, fillchar])
+
+   Return the string left justified in a string of length *width*.
+ Padding is done using the specified *fillchar* (default is an ASCII + space). The original string is returned if *width* is less than or + equal to "len(s)". + +str.lower() + + Return a copy of the string with all the cased characters [4] + converted to lowercase. + + The lowercasing algorithm used is described in section 3.13 + ‘Default Case Folding’ of the Unicode Standard. + +str.lstrip([chars]) + + Return a copy of the string with leading characters removed. The + *chars* argument is a string specifying the set of characters to be + removed. If omitted or "None", the *chars* argument defaults to + removing whitespace. The *chars* argument is not a prefix; rather, + all combinations of its values are stripped: + + >>> ' spacious '.lstrip() + 'spacious ' + >>> 'www.example.com'.lstrip('cmowz.') + 'example.com' + + See "str.removeprefix()" for a method that will remove a single + prefix string rather than all of a set of characters. For example: + + >>> 'Arthur: three!'.lstrip('Arthur: ') + 'ee!' + >>> 'Arthur: three!'.removeprefix('Arthur: ') + 'three!' + +static str.maketrans(x[, y[, z]]) + + This static method returns a translation table usable for + "str.translate()". + + If there is only one argument, it must be a dictionary mapping + Unicode ordinals (integers) or characters (strings of length 1) to + Unicode ordinals, strings (of arbitrary lengths) or "None". + Character keys will then be converted to ordinals. + + If there are two arguments, they must be strings of equal length, + and in the resulting dictionary, each character in x will be mapped + to the character at the same position in y. If there is a third + argument, it must be a string, whose characters will be mapped to + "None" in the result. + +str.partition(sep) + + Split the string at the first occurrence of *sep*, and return a + 3-tuple containing the part before the separator, the separator + itself, and the part after the separator. If the separator is not + found, return a 3-tuple containing the string itself, followed by + two empty strings. + +str.removeprefix(prefix, /) + + If the string starts with the *prefix* string, return + "string[len(prefix):]". Otherwise, return a copy of the original + string: + + >>> 'TestHook'.removeprefix('Test') + 'Hook' + >>> 'BaseTestCase'.removeprefix('Test') + 'BaseTestCase' + + Added in version 3.9. + +str.removesuffix(suffix, /) + + If the string ends with the *suffix* string and that *suffix* is + not empty, return "string[:-len(suffix)]". Otherwise, return a copy + of the original string: + + >>> 'MiscTests'.removesuffix('Tests') + 'Misc' + >>> 'TmpDirMixin'.removesuffix('Tests') + 'TmpDirMixin' + + Added in version 3.9. + +str.replace(old, new, count=-1) + + Return a copy of the string with all occurrences of substring *old* + replaced by *new*. If *count* is given, only the first *count* + occurrences are replaced. If *count* is not specified or "-1", then + all occurrences are replaced. + + Changed in version 3.13: *count* is now supported as a keyword + argument. + +str.rfind(sub[, start[, end]]) + + Return the highest index in the string where substring *sub* is + found, such that *sub* is contained within "s[start:end]". + Optional arguments *start* and *end* are interpreted as in slice + notation. Return "-1" on failure. + +str.rindex(sub[, start[, end]]) + + Like "rfind()" but raises "ValueError" when the substring *sub* is + not found. + +str.rjust(width[, fillchar]) + + Return the string right justified in a string of length *width*. 
+ Padding is done using the specified *fillchar* (default is an ASCII + space). The original string is returned if *width* is less than or + equal to "len(s)". + +str.rpartition(sep) + + Split the string at the last occurrence of *sep*, and return a + 3-tuple containing the part before the separator, the separator + itself, and the part after the separator. If the separator is not + found, return a 3-tuple containing two empty strings, followed by + the string itself. + +str.rsplit(sep=None, maxsplit=-1) + + Return a list of the words in the string, using *sep* as the + delimiter string. If *maxsplit* is given, at most *maxsplit* splits + are done, the *rightmost* ones. If *sep* is not specified or + "None", any whitespace string is a separator. Except for splitting + from the right, "rsplit()" behaves like "split()" which is + described in detail below. + +str.rstrip([chars]) + + Return a copy of the string with trailing characters removed. The + *chars* argument is a string specifying the set of characters to be + removed. If omitted or "None", the *chars* argument defaults to + removing whitespace. The *chars* argument is not a suffix; rather, + all combinations of its values are stripped: + + >>> ' spacious '.rstrip() + ' spacious' + >>> 'mississippi'.rstrip('ipz') + 'mississ' + + See "str.removesuffix()" for a method that will remove a single + suffix string rather than all of a set of characters. For example: + + >>> 'Monty Python'.rstrip(' Python') + 'M' + >>> 'Monty Python'.removesuffix(' Python') + 'Monty' + +str.split(sep=None, maxsplit=-1) + + Return a list of the words in the string, using *sep* as the + delimiter string. If *maxsplit* is given, at most *maxsplit* + splits are done (thus, the list will have at most "maxsplit+1" + elements). If *maxsplit* is not specified or "-1", then there is + no limit on the number of splits (all possible splits are made). + + If *sep* is given, consecutive delimiters are not grouped together + and are deemed to delimit empty strings (for example, + "'1,,2'.split(',')" returns "['1', '', '2']"). The *sep* argument + may consist of multiple characters as a single delimiter (to split + with multiple delimiters, use "re.split()"). Splitting an empty + string with a specified separator returns "['']". + + For example: + + >>> '1,2,3'.split(',') + ['1', '2', '3'] + >>> '1,2,3'.split(',', maxsplit=1) + ['1', '2,3'] + >>> '1,2,,3,'.split(',') + ['1', '2', '', '3', ''] + >>> '1<>2<>3<4'.split('<>') + ['1', '2', '3<4'] + + If *sep* is not specified or is "None", a different splitting + algorithm is applied: runs of consecutive whitespace are regarded + as a single separator, and the result will contain no empty strings + at the start or end if the string has leading or trailing + whitespace. Consequently, splitting an empty string or a string + consisting of just whitespace with a "None" separator returns "[]". + + For example: + + >>> '1 2 3'.split() + ['1', '2', '3'] + >>> '1 2 3'.split(maxsplit=1) + ['1', '2 3'] + >>> ' 1 2 3 '.split() + ['1', '2', '3'] + +str.splitlines(keepends=False) + + Return a list of the lines in the string, breaking at line + boundaries. Line breaks are not included in the resulting list + unless *keepends* is given and true. + + This method splits on the following line boundaries. In + particular, the boundaries are a superset of *universal newlines*. 
+ + +-------------------------+-------------------------------+ + | Representation | Description | + |=========================|===============================| + | "\n" | Line Feed | + +-------------------------+-------------------------------+ + | "\r" | Carriage Return | + +-------------------------+-------------------------------+ + | "\r\n" | Carriage Return + Line Feed | + +-------------------------+-------------------------------+ + | "\v" or "\x0b" | Line Tabulation | + +-------------------------+-------------------------------+ + | "\f" or "\x0c" | Form Feed | + +-------------------------+-------------------------------+ + | "\x1c" | File Separator | + +-------------------------+-------------------------------+ + | "\x1d" | Group Separator | + +-------------------------+-------------------------------+ + | "\x1e" | Record Separator | + +-------------------------+-------------------------------+ + | "\x85" | Next Line (C1 Control Code) | + +-------------------------+-------------------------------+ + | "\u2028" | Line Separator | + +-------------------------+-------------------------------+ + | "\u2029" | Paragraph Separator | + +-------------------------+-------------------------------+ + + Changed in version 3.2: "\v" and "\f" added to list of line + boundaries. + + For example: + + >>> 'ab c\n\nde fg\rkl\r\n'.splitlines() + ['ab c', '', 'de fg', 'kl'] + >>> 'ab c\n\nde fg\rkl\r\n'.splitlines(keepends=True) + ['ab c\n', '\n', 'de fg\r', 'kl\r\n'] + + Unlike "split()" when a delimiter string *sep* is given, this + method returns an empty list for the empty string, and a terminal + line break does not result in an extra line: + + >>> "".splitlines() + [] + >>> "One line\n".splitlines() + ['One line'] + + For comparison, "split('\n')" gives: + + >>> ''.split('\n') + [''] + >>> 'Two lines\n'.split('\n') + ['Two lines', ''] + +str.startswith(prefix[, start[, end]]) + + Return "True" if string starts with the *prefix*, otherwise return + "False". *prefix* can also be a tuple of prefixes to look for. + With optional *start*, test string beginning at that position. + With optional *end*, stop comparing string at that position. + +str.strip([chars]) + + Return a copy of the string with the leading and trailing + characters removed. The *chars* argument is a string specifying the + set of characters to be removed. If omitted or "None", the *chars* + argument defaults to removing whitespace. The *chars* argument is + not a prefix or suffix; rather, all combinations of its values are + stripped: + + >>> ' spacious '.strip() + 'spacious' + >>> 'www.example.com'.strip('cmowz.') + 'example' + + The outermost leading and trailing *chars* argument values are + stripped from the string. Characters are removed from the leading + end until reaching a string character that is not contained in the + set of characters in *chars*. A similar action takes place on the + trailing end. For example: + + >>> comment_string = '#....... Section 3.2.1 Issue #32 .......' + >>> comment_string.strip('.#! ') + 'Section 3.2.1 Issue #32' + +str.swapcase() + + Return a copy of the string with uppercase characters converted to + lowercase and vice versa. Note that it is not necessarily true that + "s.swapcase().swapcase() == s". + +str.title() + + Return a titlecased version of the string where words start with an + uppercase character and the remaining characters are lowercase. 
+
+   For example:
+
+      >>> 'Hello world'.title()
+      'Hello World'
+
+   The algorithm uses a simple language-independent definition of a
+   word as groups of consecutive letters. The definition works in
+   many contexts but it means that apostrophes in contractions and
+   possessives form word boundaries, which may not be the desired
+   result:
+
+      >>> "they're bill's friends from the UK".title()
+      "They'Re Bill'S Friends From The Uk"
+
+   The "string.capwords()" function does not have this problem, as it
+   splits words on spaces only.
+
+   Alternatively, a workaround for apostrophes can be constructed
+   using regular expressions:
+
+      >>> import re
+      >>> def titlecase(s):
+      ...     return re.sub(r"[A-Za-z]+('[A-Za-z]+)?",
+      ...                   lambda mo: mo.group(0).capitalize(),
+      ...                   s)
+      ...
+      >>> titlecase("they're bill's friends.")
+      "They're Bill's Friends."
+
+str.translate(table)
+
+   Return a copy of the string in which each character has been mapped
+   through the given translation table. The table must be an object
+   that implements indexing via "__getitem__()", typically a *mapping*
+   or *sequence*. When indexed by a Unicode ordinal (an integer), the
+   table object can do any of the following: return a Unicode ordinal
+   or a string, to map the character to one or more other characters;
+   return "None", to delete the character from the return string; or
+   raise a "LookupError" exception, to map the character to itself.
+
+   You can use "str.maketrans()" to create a translation map from
+   character-to-character mappings in different formats.
+
+   See also the "codecs" module for a more flexible approach to custom
+   character mappings.
+
+str.upper()
+
+   Return a copy of the string with all the cased characters [4]
+   converted to uppercase. Note that "s.upper().isupper()" might be
+   "False" if "s" contains uncased characters or if the Unicode
+   category of the resulting character(s) is not “Lu” (Letter,
+   uppercase), but e.g. “Lt” (Letter, titlecase).
+
+   The uppercasing algorithm used is described in section 3.13
+   ‘Default Case Folding’ of the Unicode Standard.
+
+str.zfill(width)
+
+   Return a copy of the string left filled with ASCII "'0'" digits to
+   make a string of length *width*. A leading sign prefix
+   ("'+'"/"'-'") is handled by inserting the padding *after* the sign
+   character rather than before. The original string is returned if
+   *width* is less than or equal to "len(s)".
+
+   For example:
+
+      >>> "42".zfill(5)
+      '00042'
+      >>> "-42".zfill(5)
+      '-0042'
+''',
+ 'strings': '''String and Bytes literals
+*************************
+
+String literals are described by the following lexical definitions:
+
+   stringliteral   ::= [stringprefix](shortstring | longstring)
+   stringprefix    ::= "r" | "u" | "R" | "U" | "f" | "F"
+                       | "fr" | "Fr" | "fR" | "FR" | "rf" | "rF" | "Rf" | "RF"
+   shortstring     ::= "'" shortstringitem* "'" | '"' shortstringitem* '"'
+   longstring      ::= "\'\'\'" longstringitem* "\'\'\'" | '"""' longstringitem* '"""'
+   shortstringitem ::= shortstringchar | stringescapeseq
+   longstringitem  ::= longstringchar | stringescapeseq
+   shortstringchar ::= <any source character except "\\" or newline or the quote>
+   longstringchar  ::= <any source character except "\\">
+   stringescapeseq ::= "\\" <any source character>
+
+   bytesliteral   ::= bytesprefix(shortbytes | longbytes)
+   bytesprefix    ::= "b" | "B" | "br" | "Br" | "bR" | "BR" | "rb" | "rB" | "Rb" | "RB"
+   shortbytes     ::= "'" shortbytesitem* "'" | '"' shortbytesitem* '"'
+   longbytes      ::= "\'\'\'" longbytesitem* "\'\'\'" | '"""' longbytesitem* '"""'
+   shortbytesitem ::= shortbyteschar | bytesescapeseq
+   longbytesitem  ::= longbyteschar | bytesescapeseq
+   shortbyteschar ::= <any ASCII character except "\\" or newline or the quote>
+   longbyteschar  ::= <any ASCII character except "\\">
+   bytesescapeseq ::= "\\" <any ASCII character>
+
+One syntactic restriction not indicated by these productions is that
+whitespace is not allowed between the "stringprefix" or "bytesprefix"
+and the rest of the literal. The source character set is defined by
+the encoding declaration; it is UTF-8 if no encoding declaration is
+given in the source file; see section Encoding declarations.
+
+In plain English: Both types of literals can be enclosed in matching
+single quotes ("'") or double quotes ("""). They can also be enclosed
+in matching groups of three single or double quotes (these are
+generally referred to as *triple-quoted strings*). The backslash ("\\")
+character is used to give special meaning to otherwise ordinary
+characters like "n", which means ‘newline’ when escaped ("\\n"). It can
+also be used to escape characters that otherwise have a special
+meaning, such as newline, backslash itself, or the quote character.
+See escape sequences below for examples.
+
+Bytes literals are always prefixed with "'b'" or "'B'"; they produce
+an instance of the "bytes" type instead of the "str" type. They may
+only contain ASCII characters; bytes with a numeric value of 128 or
+greater must be expressed with escapes.
+
+Both string and bytes literals may optionally be prefixed with a
+letter "'r'" or "'R'"; such constructs are called *raw string
+literals* and *raw bytes literals* respectively and treat backslashes
+as literal characters. As a result, in raw string literals, "'\\U'"
+and "'\\u'" escapes are not treated specially.
+
+Added in version 3.3: The "'rb'" prefix of raw bytes literals has been
+added as a synonym of "'br'". Support for the unicode legacy literal
+("u'value'") was reintroduced to simplify the maintenance of dual
+Python 2.x and 3.x codebases. See **PEP 414** for more information.
+
+A string literal with "'f'" or "'F'" in its prefix is a *formatted
+string literal*; see f-strings. The "'f'" may be combined with "'r'",
+but not with "'b'" or "'u'", therefore raw formatted strings are
+possible, but formatted bytes literals are not.
+
+In triple-quoted literals, unescaped newlines and quotes are allowed
+(and are retained), except that three unescaped quotes in a row
+terminate the literal. (A “quote” is the character used to open the
+literal, i.e. either "'" or """.)
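+
+A few of these literal forms, echoed as the interactive interpreter
+would display them (shown here purely for illustration; the values are
+arbitrary):
+
+   >>> b'abc'           # bytes literal: an instance of "bytes"
+   b'abc'
+   >>> rb'raw bytes'    # the 'rb' and 'br' prefixes are equivalent
+   b'raw bytes'
+   >>> f'{2 + 2}'       # formatted string literal
+   '4'
+   >>> u'legacy'        # the legacy 'u' prefix is accepted but has no effect
+   'legacy'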
+ + +Escape sequences +================ + +Unless an "'r'" or "'R'" prefix is present, escape sequences in string +and bytes literals are interpreted according to rules similar to those +used by Standard C. The recognized escape sequences are: + ++---------------------------+-----------------------------------+---------+ +| Escape Sequence | Meaning | Notes | +|===========================|===================================|=========| +| "\\" | Backslash and newline ignored | (1) | ++---------------------------+-----------------------------------+---------+ +| "\\\\" | Backslash ("\\") | | ++---------------------------+-----------------------------------+---------+ +| "\\'" | Single quote ("'") | | ++---------------------------+-----------------------------------+---------+ +| "\\"" | Double quote (""") | | ++---------------------------+-----------------------------------+---------+ +| "\\a" | ASCII Bell (BEL) | | ++---------------------------+-----------------------------------+---------+ +| "\\b" | ASCII Backspace (BS) | | ++---------------------------+-----------------------------------+---------+ +| "\\f" | ASCII Formfeed (FF) | | ++---------------------------+-----------------------------------+---------+ +| "\\n" | ASCII Linefeed (LF) | | ++---------------------------+-----------------------------------+---------+ +| "\\r" | ASCII Carriage Return (CR) | | ++---------------------------+-----------------------------------+---------+ +| "\\t" | ASCII Horizontal Tab (TAB) | | ++---------------------------+-----------------------------------+---------+ +| "\\v" | ASCII Vertical Tab (VT) | | ++---------------------------+-----------------------------------+---------+ +| "\\*ooo*" | Character with octal value *ooo* | (2,4) | ++---------------------------+-----------------------------------+---------+ +| "\\x*hh*" | Character with hex value *hh* | (3,4) | ++---------------------------+-----------------------------------+---------+ + +Escape sequences only recognized in string literals are: + ++---------------------------+-----------------------------------+---------+ +| Escape Sequence | Meaning | Notes | +|===========================|===================================|=========| +| "\\N{*name*}" | Character named *name* in the | (5) | +| | Unicode database | | ++---------------------------+-----------------------------------+---------+ +| "\\u*xxxx*" | Character with 16-bit hex value | (6) | +| | *xxxx* | | ++---------------------------+-----------------------------------+---------+ +| "\\U*xxxxxxxx*" | Character with 32-bit hex value | (7) | +| | *xxxxxxxx* | | ++---------------------------+-----------------------------------+---------+ + +Notes: + +1. A backslash can be added at the end of a line to ignore the + newline: + + >>> 'This string will not include \\ + ... backslashes or newline characters.' + 'This string will not include backslashes or newline characters.' + + The same result can be achieved using triple-quoted strings, or + parentheses and string literal concatenation. + +2. As in Standard C, up to three octal digits are accepted. + + Changed in version 3.11: Octal escapes with value larger than + "0o377" produce a "DeprecationWarning". + + Changed in version 3.12: Octal escapes with value larger than + "0o377" produce a "SyntaxWarning". In a future Python version they + will be eventually a "SyntaxError". + +3. Unlike in Standard C, exactly two hex digits are required. + +4. In a bytes literal, hexadecimal and octal escapes denote the byte + with the given value. 
In a string literal, these escapes denote a + Unicode character with the given value. + +5. Changed in version 3.3: Support for name aliases [1] has been + added. + +6. Exactly four hex digits are required. + +7. Any Unicode character can be encoded this way. Exactly eight hex + digits are required. + +Unlike Standard C, all unrecognized escape sequences are left in the +string unchanged, i.e., *the backslash is left in the result*. (This +behavior is useful when debugging: if an escape sequence is mistyped, +the resulting output is more easily recognized as broken.) It is also +important to note that the escape sequences only recognized in string +literals fall into the category of unrecognized escapes for bytes +literals. + +Changed in version 3.6: Unrecognized escape sequences produce a +"DeprecationWarning". + +Changed in version 3.12: Unrecognized escape sequences produce a +"SyntaxWarning". In a future Python version they will be eventually a +"SyntaxError". + +Even in a raw literal, quotes can be escaped with a backslash, but the +backslash remains in the result; for example, "r"\\""" is a valid +string literal consisting of two characters: a backslash and a double +quote; "r"\\"" is not a valid string literal (even a raw string cannot +end in an odd number of backslashes). Specifically, *a raw literal +cannot end in a single backslash* (since the backslash would escape +the following quote character). Note also that a single backslash +followed by a newline is interpreted as those two characters as part +of the literal, *not* as a line continuation. +''', + 'subscriptions': r'''Subscriptions +************* + +The subscription of an instance of a container class will generally +select an element from the container. The subscription of a *generic +class* will generally return a GenericAlias object. + + subscription ::= primary "[" flexible_expression_list "]" + +When an object is subscripted, the interpreter will evaluate the +primary and the expression list. + +The primary must evaluate to an object that supports subscription. An +object may support subscription through defining one or both of +"__getitem__()" and "__class_getitem__()". When the primary is +subscripted, the evaluated result of the expression list will be +passed to one of these methods. For more details on when +"__class_getitem__" is called instead of "__getitem__", see +__class_getitem__ versus __getitem__. + +If the expression list contains at least one comma, or if any of the +expressions are starred, the expression list will evaluate to a +"tuple" containing the items of the expression list. Otherwise, the +expression list will evaluate to the value of the list’s sole member. + +Changed in version 3.11: Expressions in an expression list may be +starred. See **PEP 646**. + +For built-in objects, there are two types of objects that support +subscription via "__getitem__()": + +1. Mappings. If the primary is a *mapping*, the expression list must + evaluate to an object whose value is one of the keys of the + mapping, and the subscription selects the value in the mapping that + corresponds to that key. An example of a builtin mapping class is + the "dict" class. + +2. Sequences. If the primary is a *sequence*, the expression list must + evaluate to an "int" or a "slice" (as discussed in the following + section). Examples of builtin sequence classes include the "str", + "list" and "tuple" classes. + +The formal syntax makes no special provision for negative indices in +*sequences*. 
However, built-in sequences all provide a "__getitem__()" +method that interprets negative indices by adding the length of the +sequence to the index so that, for example, "x[-1]" selects the last +item of "x". The resulting value must be a nonnegative integer less +than the number of items in the sequence, and the subscription selects +the item whose index is that value (counting from zero). Since the +support for negative indices and slicing occurs in the object’s +"__getitem__()" method, subclasses overriding this method will need to +explicitly add that support. + +A "string" is a special kind of sequence whose items are *characters*. +A character is not a separate data type but a string of exactly one +character. +''', + 'truth': r'''Truth Value Testing +******************* + +Any object can be tested for truth value, for use in an "if" or +"while" condition or as operand of the Boolean operations below. + +By default, an object is considered true unless its class defines +either a "__bool__()" method that returns "False" or a "__len__()" +method that returns zero, when called with the object. [1] Here are +most of the built-in objects considered false: + +* constants defined to be false: "None" and "False" + +* zero of any numeric type: "0", "0.0", "0j", "Decimal(0)", + "Fraction(0, 1)" + +* empty sequences and collections: "''", "()", "[]", "{}", "set()", + "range(0)" + +Operations and built-in functions that have a Boolean result always +return "0" or "False" for false and "1" or "True" for true, unless +otherwise stated. (Important exception: the Boolean operations "or" +and "and" always return one of their operands.) +''', + 'try': r'''The "try" statement +******************* + +The "try" statement specifies exception handlers and/or cleanup code +for a group of statements: + + try_stmt ::= try1_stmt | try2_stmt | try3_stmt + try1_stmt ::= "try" ":" suite + ("except" [expression ["as" identifier]] ":" suite)+ + ["else" ":" suite] + ["finally" ":" suite] + try2_stmt ::= "try" ":" suite + ("except" "*" expression ["as" identifier] ":" suite)+ + ["else" ":" suite] + ["finally" ":" suite] + try3_stmt ::= "try" ":" suite + "finally" ":" suite + +Additional information on exceptions can be found in section +Exceptions, and information on using the "raise" statement to generate +exceptions may be found in section The raise statement. + + +"except" clause +=============== + +The "except" clause(s) specify one or more exception handlers. When no +exception occurs in the "try" clause, no exception handler is +executed. When an exception occurs in the "try" suite, a search for an +exception handler is started. This search inspects the "except" +clauses in turn until one is found that matches the exception. An +expression-less "except" clause, if present, must be last; it matches +any exception. + +For an "except" clause with an expression, the expression must +evaluate to an exception type or a tuple of exception types. The +raised exception matches an "except" clause whose expression evaluates +to the class or a *non-virtual base class* of the exception object, or +to a tuple that contains such a class. + +If no "except" clause matches the exception, the search for an +exception handler continues in the surrounding code and on the +invocation stack. 
[1] + +If the evaluation of an expression in the header of an "except" clause +raises an exception, the original search for a handler is canceled and +a search starts for the new exception in the surrounding code and on +the call stack (it is treated as if the entire "try" statement raised +the exception). + +When a matching "except" clause is found, the exception is assigned to +the target specified after the "as" keyword in that "except" clause, +if present, and the "except" clause’s suite is executed. All "except" +clauses must have an executable block. When the end of this block is +reached, execution continues normally after the entire "try" +statement. (This means that if two nested handlers exist for the same +exception, and the exception occurs in the "try" clause of the inner +handler, the outer handler will not handle the exception.) + +When an exception has been assigned using "as target", it is cleared +at the end of the "except" clause. This is as if + + except E as N: + foo + +was translated to + + except E as N: + try: + foo + finally: + del N + +This means the exception must be assigned to a different name to be +able to refer to it after the "except" clause. Exceptions are cleared +because with the traceback attached to them, they form a reference +cycle with the stack frame, keeping all locals in that frame alive +until the next garbage collection occurs. + +Before an "except" clause’s suite is executed, the exception is stored +in the "sys" module, where it can be accessed from within the body of +the "except" clause by calling "sys.exception()". When leaving an +exception handler, the exception stored in the "sys" module is reset +to its previous value: + + >>> print(sys.exception()) + None + >>> try: + ... raise TypeError + ... except: + ... print(repr(sys.exception())) + ... try: + ... raise ValueError + ... except: + ... print(repr(sys.exception())) + ... print(repr(sys.exception())) + ... + TypeError() + ValueError() + TypeError() + >>> print(sys.exception()) + None + + +"except*" clause +================ + +The "except*" clause(s) are used for handling "ExceptionGroup"s. The +exception type for matching is interpreted as in the case of "except", +but in the case of exception groups we can have partial matches when +the type matches some of the exceptions in the group. This means that +multiple "except*" clauses can execute, each handling part of the +exception group. Each clause executes at most once and handles an +exception group of all matching exceptions. Each exception in the +group is handled by at most one "except*" clause, the first that +matches it. + + >>> try: + ... raise ExceptionGroup("eg", + ... [ValueError(1), TypeError(2), OSError(3), OSError(4)]) + ... except* TypeError as e: + ... print(f'caught {type(e)} with nested {e.exceptions}') + ... except* OSError as e: + ... print(f'caught {type(e)} with nested {e.exceptions}') + ... + caught with nested (TypeError(2),) + caught with nested (OSError(3), OSError(4)) + + Exception Group Traceback (most recent call last): + | File "", line 2, in + | ExceptionGroup: eg + +-+---------------- 1 ---------------- + | ValueError: 1 + +------------------------------------ + +Any remaining exceptions that were not handled by any "except*" clause +are re-raised at the end, along with all exceptions that were raised +from within the "except*" clauses. If this list contains more than one +exception to reraise, they are combined into an exception group. 
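+
+As a minimal illustrative sketch (the particular exception values are
+arbitrary), the part of a group that no "except*" clause handles
+propagates to the surrounding handlers:
+
+   >>> try:
+   ...     try:
+   ...         raise ExceptionGroup("eg", [ValueError(1), TypeError(2)])
+   ...     except* ValueError as e:
+   ...         print(f'handled: {e.exceptions!r}')
+   ... except ExceptionGroup as e:
+   ...     print(f're-raised: {e.exceptions!r}')
+   ...
+   handled: (ValueError(1),)
+   re-raised: (TypeError(2),)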
+ +If the raised exception is not an exception group and its type matches +one of the "except*" clauses, it is caught and wrapped by an exception +group with an empty message string. + + >>> try: + ... raise BlockingIOError + ... except* BlockingIOError as e: + ... print(repr(e)) + ... + ExceptionGroup('', (BlockingIOError())) + +An "except*" clause must have a matching expression; it cannot be +"except*:". Furthermore, this expression cannot contain exception +group types, because that would have ambiguous semantics. + +It is not possible to mix "except" and "except*" in the same "try". +"break", "continue" and "return" cannot appear in an "except*" clause. + + +"else" clause +============= + +The optional "else" clause is executed if the control flow leaves the +"try" suite, no exception was raised, and no "return", "continue", or +"break" statement was executed. Exceptions in the "else" clause are +not handled by the preceding "except" clauses. + + +"finally" clause +================ + +If "finally" is present, it specifies a ‘cleanup’ handler. The "try" +clause is executed, including any "except" and "else" clauses. If an +exception occurs in any of the clauses and is not handled, the +exception is temporarily saved. The "finally" clause is executed. If +there is a saved exception it is re-raised at the end of the "finally" +clause. If the "finally" clause raises another exception, the saved +exception is set as the context of the new exception. If the "finally" +clause executes a "return", "break" or "continue" statement, the saved +exception is discarded: + + >>> def f(): + ... try: + ... 1/0 + ... finally: + ... return 42 + ... + >>> f() + 42 + +The exception information is not available to the program during +execution of the "finally" clause. + +When a "return", "break" or "continue" statement is executed in the +"try" suite of a "try"…"finally" statement, the "finally" clause is +also executed ‘on the way out.’ + +The return value of a function is determined by the last "return" +statement executed. Since the "finally" clause always executes, a +"return" statement executed in the "finally" clause will always be the +last one executed: + + >>> def foo(): + ... try: + ... return 'try' + ... finally: + ... return 'finally' + ... + >>> foo() + 'finally' + +Changed in version 3.8: Prior to Python 3.8, a "continue" statement +was illegal in the "finally" clause due to a problem with the +implementation. +''', + 'types': r'''The standard type hierarchy +*************************** + +Below is a list of the types that are built into Python. Extension +modules (written in C, Java, or other languages, depending on the +implementation) can define additional types. Future versions of +Python may add types to the type hierarchy (e.g., rational numbers, +efficiently stored arrays of integers, etc.), although such additions +will often be provided via the standard library instead. + +Some of the type descriptions below contain a paragraph listing +‘special attributes.’ These are attributes that provide access to the +implementation and are not intended for general use. Their definition +may change in the future. + + +None +==== + +This type has a single value. There is a single object with this +value. This object is accessed through the built-in name "None". It is +used to signify the absence of a value in many situations, e.g., it is +returned from functions that don’t explicitly return anything. Its +truth value is false. + + +NotImplemented +============== + +This type has a single value. 
There is a single object with this +value. This object is accessed through the built-in name +"NotImplemented". Numeric methods and rich comparison methods should +return this value if they do not implement the operation for the +operands provided. (The interpreter will then try the reflected +operation, or some other fallback, depending on the operator.) It +should not be evaluated in a boolean context. + +See Implementing the arithmetic operations for more details. + +Changed in version 3.9: Evaluating "NotImplemented" in a boolean +context was deprecated. + +Changed in version 3.14: Evaluating "NotImplemented" in a boolean +context now raises a "TypeError". It previously evaluated to "True" +and emitted a "DeprecationWarning" since Python 3.9. + + +Ellipsis +======== + +This type has a single value. There is a single object with this +value. This object is accessed through the literal "..." or the built- +in name "Ellipsis". Its truth value is true. + + +"numbers.Number" +================ + +These are created by numeric literals and returned as results by +arithmetic operators and arithmetic built-in functions. Numeric +objects are immutable; once created their value never changes. Python +numbers are of course strongly related to mathematical numbers, but +subject to the limitations of numerical representation in computers. + +The string representations of the numeric classes, computed by +"__repr__()" and "__str__()", have the following properties: + +* They are valid numeric literals which, when passed to their class + constructor, produce an object having the value of the original + numeric. + +* The representation is in base 10, when possible. + +* Leading zeros, possibly excepting a single zero before a decimal + point, are not shown. + +* Trailing zeros, possibly excepting a single zero after a decimal + point, are not shown. + +* A sign is shown only when the number is negative. + +Python distinguishes between integers, floating-point numbers, and +complex numbers: + + +"numbers.Integral" +------------------ + +These represent elements from the mathematical set of integers +(positive and negative). + +Note: + + The rules for integer representation are intended to give the most + meaningful interpretation of shift and mask operations involving + negative integers. + +There are two types of integers: + +Integers ("int") + These represent numbers in an unlimited range, subject to available + (virtual) memory only. For the purpose of shift and mask + operations, a binary representation is assumed, and negative + numbers are represented in a variant of 2’s complement which gives + the illusion of an infinite string of sign bits extending to the + left. + +Booleans ("bool") + These represent the truth values False and True. The two objects + representing the values "False" and "True" are the only Boolean + objects. The Boolean type is a subtype of the integer type, and + Boolean values behave like the values 0 and 1, respectively, in + almost all contexts, the exception being that when converted to a + string, the strings ""False"" or ""True"" are returned, + respectively. + + +"numbers.Real" ("float") +------------------------ + +These represent machine-level double precision floating-point numbers. +You are at the mercy of the underlying machine architecture (and C or +Java implementation) for the accepted range and handling of overflow. 
+
+Python does not support single-precision floating-point numbers; the
+savings in processor and memory usage that are usually the reason for
+using these are dwarfed by the overhead of using objects in Python, so
+there is no reason to complicate the language with two kinds of
+floating-point numbers.
+
+
+"numbers.Complex" ("complex")
+-----------------------------
+
+These represent complex numbers as a pair of machine-level double
+precision floating-point numbers. The same caveats apply as for
+floating-point numbers. The real and imaginary parts of a complex
+number "z" can be retrieved through the read-only attributes "z.real"
+and "z.imag".
+
+
+Sequences
+=========
+
+These represent finite ordered sets indexed by non-negative numbers.
+The built-in function "len()" returns the number of items of a
+sequence. When the length of a sequence is *n*, the index set contains
+the numbers 0, 1, …, *n*-1. Item *i* of sequence *a* is selected by
+"a[i]". Some sequences, including built-in sequences, interpret
+negative subscripts by adding the sequence length. For example,
+"a[-2]" equals "a[n-2]", the second to last item of sequence a with
+length "n".
+
+Sequences also support slicing: "a[i:j]" selects all items with index
+*k* such that *i* "<=" *k* "<" *j*. When used as an expression, a
+slice is a sequence of the same type. The comment above about negative
+indexes also applies to negative slice positions.
+
+Some sequences also support “extended slicing” with a third “step”
+parameter: "a[i:j:k]" selects all items of *a* with index *x* where
+"x = i + n*k", *n* ">=" "0" and *i* "<=" *x* "<" *j*.
+
+Sequences are distinguished according to their mutability:
+
+
+Immutable sequences
+-------------------
+
+An object of an immutable sequence type cannot change once it is
+created. (If the object contains references to other objects, these
+other objects may be mutable and may be changed; however, the
+collection of objects directly referenced by an immutable object
+cannot change.)
+
+The following types are immutable sequences:
+
+Strings
+   A string is a sequence of values that represent Unicode code
+   points. All the code points in the range "U+0000 - U+10FFFF" can be
+   represented in a string. Python doesn’t have a char type; instead,
+   every code point in the string is represented as a string object
+   with length "1". The built-in function "ord()" converts a code
+   point from its string form to an integer in the range "0 - 10FFFF";
+   "chr()" converts an integer in the range "0 - 10FFFF" to the
+   corresponding length "1" string object. "str.encode()" can be used
+   to convert a "str" to "bytes" using the given text encoding, and
+   "bytes.decode()" can be used to achieve the opposite.
+
+Tuples
+   The items of a tuple are arbitrary Python objects. Tuples of two or
+   more items are formed by comma-separated lists of expressions. A
+   tuple of one item (a ‘singleton’) can be formed by affixing a comma
+   to an expression (an expression by itself does not create a tuple,
+   since parentheses must be usable for grouping of expressions). An
+   empty tuple can be formed by an empty pair of parentheses.
+
+Bytes
+   A bytes object is an immutable array. The items are 8-bit bytes,
+   represented by integers in the range 0 <= x < 256. Bytes literals
+   (like "b'abc'") and the built-in "bytes()" constructor can be used
+   to create bytes objects. Also, bytes objects can be decoded to
+   strings via the "decode()" method.
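+
+For illustration only (the particular values are arbitrary), a short
+interpreter session exercising these immutable sequence types:
+
+   >>> ord('a'), chr(97)       # code point <-> length-1 string
+   (97, 'a')
+   >>> 'abc'.encode('utf-8')   # str -> bytes
+   b'abc'
+   >>> b'abc'.decode('utf-8')  # bytes -> str
+   'abc'
+   >>> b'abc'[0]               # indexing a bytes object yields an integer
+   97
+   >>> (42,)                   # a one-item tuple needs a trailing comma
+   (42,)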
+ + +Mutable sequences +----------------- + +Mutable sequences can be changed after they are created. The +subscription and slicing notations can be used as the target of +assignment and "del" (delete) statements. + +Note: + + The "collections" and "array" module provide additional examples of + mutable sequence types. + +There are currently two intrinsic mutable sequence types: + +Lists + The items of a list are arbitrary Python objects. Lists are formed + by placing a comma-separated list of expressions in square + brackets. (Note that there are no special cases needed to form + lists of length 0 or 1.) + +Byte Arrays + A bytearray object is a mutable array. They are created by the + built-in "bytearray()" constructor. Aside from being mutable (and + hence unhashable), byte arrays otherwise provide the same interface + and functionality as immutable "bytes" objects. + + +Set types +========= + +These represent unordered, finite sets of unique, immutable objects. +As such, they cannot be indexed by any subscript. However, they can be +iterated over, and the built-in function "len()" returns the number of +items in a set. Common uses for sets are fast membership testing, +removing duplicates from a sequence, and computing mathematical +operations such as intersection, union, difference, and symmetric +difference. + +For set elements, the same immutability rules apply as for dictionary +keys. Note that numeric types obey the normal rules for numeric +comparison: if two numbers compare equal (e.g., "1" and "1.0"), only +one of them can be contained in a set. + +There are currently two intrinsic set types: + +Sets + These represent a mutable set. They are created by the built-in + "set()" constructor and can be modified afterwards by several + methods, such as "add()". + +Frozen sets + These represent an immutable set. They are created by the built-in + "frozenset()" constructor. As a frozenset is immutable and + *hashable*, it can be used again as an element of another set, or + as a dictionary key. + + +Mappings +======== + +These represent finite sets of objects indexed by arbitrary index +sets. The subscript notation "a[k]" selects the item indexed by "k" +from the mapping "a"; this can be used in expressions and as the +target of assignments or "del" statements. The built-in function +"len()" returns the number of items in a mapping. + +There is currently a single intrinsic mapping type: + + +Dictionaries +------------ + +These represent finite sets of objects indexed by nearly arbitrary +values. The only types of values not acceptable as keys are values +containing lists or dictionaries or other mutable types that are +compared by value rather than by object identity, the reason being +that the efficient implementation of dictionaries requires a key’s +hash value to remain constant. Numeric types used for keys obey the +normal rules for numeric comparison: if two numbers compare equal +(e.g., "1" and "1.0") then they can be used interchangeably to index +the same dictionary entry. + +Dictionaries preserve insertion order, meaning that keys will be +produced in the same order they were added sequentially over the +dictionary. Replacing an existing key does not change the order, +however removing a key and re-inserting it will add it to the end +instead of keeping its old place. + +Dictionaries are mutable; they can be created by the "{}" notation +(see section Dictionary displays). 
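+
+A small illustrative session (the keys and values are arbitrary)
+showing the insertion-order behaviour described above:
+
+   >>> d = {'a': 1, 'b': 2, 'c': 3}
+   >>> d['b'] = 20      # replacing a value keeps the key's position
+   >>> list(d)
+   ['a', 'b', 'c']
+   >>> del d['b']
+   >>> d['b'] = 2       # re-inserting a removed key appends it at the end
+   >>> list(d)
+   ['a', 'c', 'b']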
+ +The extension modules "dbm.ndbm" and "dbm.gnu" provide additional +examples of mapping types, as does the "collections" module. + +Changed in version 3.7: Dictionaries did not preserve insertion order +in versions of Python before 3.6. In CPython 3.6, insertion order was +preserved, but it was considered an implementation detail at that time +rather than a language guarantee. + + +Callable types +============== + +These are the types to which the function call operation (see section +Calls) can be applied: + + +User-defined functions +---------------------- + +A user-defined function object is created by a function definition +(see section Function definitions). It should be called with an +argument list containing the same number of items as the function’s +formal parameter list. + + +Special read-only attributes +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + ++----------------------------------------------------+----------------------------------------------------+ +| Attribute | Meaning | +|====================================================|====================================================| +| function.__globals__ | A reference to the "dictionary" that holds the | +| | function’s global variables – the global namespace | +| | of the module in which the function was defined. | ++----------------------------------------------------+----------------------------------------------------+ +| function.__closure__ | "None" or a "tuple" of cells that contain bindings | +| | for the names specified in the "co_freevars" | +| | attribute of the function’s "code object". A cell | +| | object has the attribute "cell_contents". This can | +| | be used to get the value of the cell, as well as | +| | set the value. | ++----------------------------------------------------+----------------------------------------------------+ + + +Special writable attributes +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Most of these attributes check the type of the assigned value: + ++----------------------------------------------------+----------------------------------------------------+ +| Attribute | Meaning | +|====================================================|====================================================| +| function.__doc__ | The function’s documentation string, or "None" if | +| | unavailable. | ++----------------------------------------------------+----------------------------------------------------+ +| function.__name__ | The function’s name. See also: "__name__ | +| | attributes". | ++----------------------------------------------------+----------------------------------------------------+ +| function.__qualname__ | The function’s *qualified name*. See also: | +| | "__qualname__ attributes". Added in version 3.3. | ++----------------------------------------------------+----------------------------------------------------+ +| function.__module__ | The name of the module the function was defined | +| | in, or "None" if unavailable. | ++----------------------------------------------------+----------------------------------------------------+ +| function.__defaults__ | A "tuple" containing default *parameter* values | +| | for those parameters that have defaults, or "None" | +| | if no parameters have a default value. | ++----------------------------------------------------+----------------------------------------------------+ +| function.__code__ | The code object representing the compiled function | +| | body. 
| ++----------------------------------------------------+----------------------------------------------------+ +| function.__dict__ | The namespace supporting arbitrary function | +| | attributes. See also: "__dict__ attributes". | ++----------------------------------------------------+----------------------------------------------------+ +| function.__annotations__ | A "dictionary" containing annotations of | +| | *parameters*. The keys of the dictionary are the | +| | parameter names, and "'return'" for the return | +| | annotation, if provided. See also: | +| | "object.__annotations__". Changed in version | +| | 3.14: Annotations are now lazily evaluated. See | +| | **PEP 649**. | ++----------------------------------------------------+----------------------------------------------------+ +| function.__annotate__ | The *annotate function* for this function, or | +| | "None" if the function has no annotations. See | +| | "object.__annotate__". Added in version 3.14. | ++----------------------------------------------------+----------------------------------------------------+ +| function.__kwdefaults__ | A "dictionary" containing defaults for keyword- | +| | only *parameters*. | ++----------------------------------------------------+----------------------------------------------------+ +| function.__type_params__ | A "tuple" containing the type parameters of a | +| | generic function. Added in version 3.12. | ++----------------------------------------------------+----------------------------------------------------+ + +Function objects also support getting and setting arbitrary +attributes, which can be used, for example, to attach metadata to +functions. Regular attribute dot-notation is used to get and set such +attributes. + +**CPython implementation detail:** CPython’s current implementation +only supports function attributes on user-defined functions. Function +attributes on built-in functions may be supported in the future. + +Additional information about a function’s definition can be retrieved +from its code object (accessible via the "__code__" attribute). + + +Instance methods +---------------- + +An instance method object combines a class, a class instance and any +callable object (normally a user-defined function). + +Special read-only attributes: + ++----------------------------------------------------+----------------------------------------------------+ +| method.__self__ | Refers to the class instance object to which the | +| | method is bound | ++----------------------------------------------------+----------------------------------------------------+ +| method.__func__ | Refers to the original function object | ++----------------------------------------------------+----------------------------------------------------+ +| method.__doc__ | The method’s documentation (same as | +| | "method.__func__.__doc__"). A "string" if the | +| | original function had a docstring, else "None". | ++----------------------------------------------------+----------------------------------------------------+ +| method.__name__ | The name of the method (same as | +| | "method.__func__.__name__") | ++----------------------------------------------------+----------------------------------------------------+ +| method.__module__ | The name of the module the method was defined in, | +| | or "None" if unavailable. 
|
++----------------------------------------------------+----------------------------------------------------+
+
+Methods also support accessing (but not setting) the arbitrary
+function attributes on the underlying function object.
+
+User-defined method objects may be created when getting an attribute
+of a class (perhaps via an instance of that class), if that attribute
+is a user-defined function object or a "classmethod" object.
+
+When an instance method object is created by retrieving a user-defined
+function object from a class via one of its instances, its "__self__"
+attribute is the instance, and the method object is said to be
+*bound*. The new method’s "__func__" attribute is the original
+function object.
+
+When an instance method object is created by retrieving a
+"classmethod" object from a class or instance, its "__self__"
+attribute is the class itself, and its "__func__" attribute is the
+function object underlying the class method.
+
+When an instance method object is called, the underlying function
+("__func__") is called, inserting the class instance ("__self__") in
+front of the argument list. For instance, when "C" is a class which
+contains a definition for a function "f()", and "x" is an instance of
+"C", calling "x.f(1)" is equivalent to calling "C.f(x, 1)".
+
+When an instance method object is derived from a "classmethod" object,
+the “class instance” stored in "__self__" will actually be the class
+itself, so that calling either "x.f(1)" or "C.f(1)" is equivalent to
+calling "f(C,1)" where "f" is the underlying function.
+
+It is important to note that user-defined functions which are
+attributes of a class instance are not converted to bound methods;
+this *only* happens when the function is an attribute of the class.
+
+
+Generator functions
+-------------------
+
+A function or method which uses the "yield" statement (see section The
+yield statement) is called a *generator function*. Such a function,
+when called, always returns an *iterator* object which can be used to
+execute the body of the function: calling the iterator’s
+"iterator.__next__()" method will cause the function to execute until
+it provides a value using the "yield" statement. When the function
+executes a "return" statement or falls off the end, a "StopIteration"
+exception is raised and the iterator will have reached the end of the
+set of values to be returned.
+
+
+Coroutine functions
+-------------------
+
+A function or method which is defined using "async def" is called a
+*coroutine function*. Such a function, when called, returns a
+*coroutine* object. It may contain "await" expressions, as well as
+"async with" and "async for" statements. See also the Coroutine
+Objects section.
+
+
+Asynchronous generator functions
+--------------------------------
+
+A function or method which is defined using "async def" and which uses
+the "yield" statement is called an *asynchronous generator function*.
+Such a function, when called, returns an *asynchronous iterator*
+object which can be used in an "async for" statement to execute the
+body of the function.
+
+Calling the asynchronous iterator’s "aiterator.__anext__" method will
+return an *awaitable* which when awaited will execute until it
+provides a value using the "yield" expression. When the function
+executes an empty "return" statement or falls off the end, a
+"StopAsyncIteration" exception is raised and the asynchronous iterator
+will have reached the end of the set of values to be yielded.
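+
+For illustration, a short interactive session (the function and
+variable names are arbitrary examples) shows a generator function and
+an asynchronous generator function in use:
+
+   >>> def counter(n):
+   ...     for i in range(n):
+   ...         yield i
+   ...
+   >>> it = counter(3)
+   >>> next(it)
+   0
+   >>> list(it)
+   [1, 2]
+
+   >>> import asyncio
+   >>> async def acounter(n):
+   ...     for i in range(n):
+   ...         yield i
+   ...
+   >>> async def main():
+   ...     return [i async for i in acounter(3)]
+   ...
+   >>> asyncio.run(main())
+   [0, 1, 2]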
+ + +Built-in functions +------------------ + +A built-in function object is a wrapper around a C function. Examples +of built-in functions are "len()" and "math.sin()" ("math" is a +standard built-in module). The number and type of the arguments are +determined by the C function. Special read-only attributes: + +* "__doc__" is the function’s documentation string, or "None" if + unavailable. See "function.__doc__". + +* "__name__" is the function’s name. See "function.__name__". + +* "__self__" is set to "None" (but see the next item). + +* "__module__" is the name of the module the function was defined in + or "None" if unavailable. See "function.__module__". + + +Built-in methods +---------------- + +This is really a different disguise of a built-in function, this time +containing an object passed to the C function as an implicit extra +argument. An example of a built-in method is "alist.append()", +assuming *alist* is a list object. In this case, the special read-only +attribute "__self__" is set to the object denoted by *alist*. (The +attribute has the same semantics as it does with "other instance +methods".) + + +Classes +------- + +Classes are callable. These objects normally act as factories for new +instances of themselves, but variations are possible for class types +that override "__new__()". The arguments of the call are passed to +"__new__()" and, in the typical case, to "__init__()" to initialize +the new instance. + + +Class Instances +--------------- + +Instances of arbitrary classes can be made callable by defining a +"__call__()" method in their class. + + +Modules +======= + +Modules are a basic organizational unit of Python code, and are +created by the import system as invoked either by the "import" +statement, or by calling functions such as "importlib.import_module()" +and built-in "__import__()". A module object has a namespace +implemented by a "dictionary" object (this is the dictionary +referenced by the "__globals__" attribute of functions defined in the +module). Attribute references are translated to lookups in this +dictionary, e.g., "m.x" is equivalent to "m.__dict__["x"]". A module +object does not contain the code object used to initialize the module +(since it isn’t needed once the initialization is done). + +Attribute assignment updates the module’s namespace dictionary, e.g., +"m.x = 1" is equivalent to "m.__dict__["x"] = 1". + + +Import-related attributes on module objects +------------------------------------------- + +Module objects have the following attributes that relate to the import +system. When a module is created using the machinery associated with +the import system, these attributes are filled in based on the +module’s *spec*, before the *loader* executes and loads the module. + +To create a module dynamically rather than using the import system, +it’s recommended to use "importlib.util.module_from_spec()", which +will set the various import-controlled attributes to appropriate +values. It’s also possible to use the "types.ModuleType" constructor +to create modules directly, but this technique is more error-prone, as +most attributes must be manually set on the module object after it has +been created when using this approach. + +Caution: + + With the exception of "__name__", it is **strongly** recommended + that you rely on "__spec__" and its attributes instead of any of the + other individual attributes listed in this subsection. 
Note that + updating an attribute on "__spec__" will not update the + corresponding attribute on the module itself: + + >>> import typing + >>> typing.__name__, typing.__spec__.name + ('typing', 'typing') + >>> typing.__spec__.name = 'spelling' + >>> typing.__name__, typing.__spec__.name + ('typing', 'spelling') + >>> typing.__name__ = 'keyboard_smashing' + >>> typing.__name__, typing.__spec__.name + ('keyboard_smashing', 'spelling') + +module.__name__ + + The name used to uniquely identify the module in the import system. + For a directly executed module, this will be set to ""__main__"". + + This attribute must be set to the fully qualified name of the + module. It is expected to match the value of + "module.__spec__.name". + +module.__spec__ + + A record of the module’s import-system-related state. + + Set to the "module spec" that was used when importing the module. + See Module specs for more details. + + Added in version 3.4. + +module.__package__ + + The *package* a module belongs to. + + If the module is top-level (that is, not a part of any specific + package) then the attribute should be set to "''" (the empty + string). Otherwise, it should be set to the name of the module’s + package (which can be equal to "module.__name__" if the module + itself is a package). See **PEP 366** for further details. + + This attribute is used instead of "__name__" to calculate explicit + relative imports for main modules. It defaults to "None" for + modules created dynamically using the "types.ModuleType" + constructor; use "importlib.util.module_from_spec()" instead to + ensure the attribute is set to a "str". + + It is **strongly** recommended that you use + "module.__spec__.parent" instead of "module.__package__". + "__package__" is now only used as a fallback if "__spec__.parent" + is not set, and this fallback path is deprecated. + + Changed in version 3.4: This attribute now defaults to "None" for + modules created dynamically using the "types.ModuleType" + constructor. Previously the attribute was optional. + + Changed in version 3.6: The value of "__package__" is expected to + be the same as "__spec__.parent". "__package__" is now only used as + a fallback during import resolution if "__spec__.parent" is not + defined. + + Changed in version 3.10: "ImportWarning" is raised if an import + resolution falls back to "__package__" instead of + "__spec__.parent". + + Changed in version 3.12: Raise "DeprecationWarning" instead of + "ImportWarning" when falling back to "__package__" during import + resolution. + + Deprecated since version 3.13, will be removed in version 3.15: + "__package__" will cease to be set or taken into consideration by + the import system or standard library. + +module.__loader__ + + The *loader* object that the import machinery used to load the + module. + + This attribute is mostly useful for introspection, but can be used + for additional loader-specific functionality, for example getting + data associated with a loader. + + "__loader__" defaults to "None" for modules created dynamically + using the "types.ModuleType" constructor; use + "importlib.util.module_from_spec()" instead to ensure the attribute + is set to a *loader* object. + + It is **strongly** recommended that you use + "module.__spec__.loader" instead of "module.__loader__". + + Changed in version 3.4: This attribute now defaults to "None" for + modules created dynamically using the "types.ModuleType" + constructor. Previously the attribute was optional. 
+ + Deprecated since version 3.12, will be removed in version 3.16: + Setting "__loader__" on a module while failing to set + "__spec__.loader" is deprecated. In Python 3.16, "__loader__" will + cease to be set or taken into consideration by the import system or + the standard library. + +module.__path__ + + A (possibly empty) *sequence* of strings enumerating the locations + where the package’s submodules will be found. Non-package modules + should not have a "__path__" attribute. See __path__ attributes on + modules for more details. + + It is **strongly** recommended that you use + "module.__spec__.submodule_search_locations" instead of + "module.__path__". + +module.__file__ + +module.__cached__ + + "__file__" and "__cached__" are both optional attributes that may + or may not be set. Both attributes should be a "str" when they are + available. + + "__file__" indicates the pathname of the file from which the module + was loaded (if loaded from a file), or the pathname of the shared + library file for extension modules loaded dynamically from a shared + library. It might be missing for certain types of modules, such as + C modules that are statically linked into the interpreter, and the + import system may opt to leave it unset if it has no semantic + meaning (for example, a module loaded from a database). + + If "__file__" is set then the "__cached__" attribute might also be + set, which is the path to any compiled version of the code (for + example, a byte-compiled file). The file does not need to exist to + set this attribute; the path can simply point to where the compiled + file *would* exist (see **PEP 3147**). + + Note that "__cached__" may be set even if "__file__" is not set. + However, that scenario is quite atypical. Ultimately, the *loader* + is what makes use of the module spec provided by the *finder* (from + which "__file__" and "__cached__" are derived). So if a loader can + load from a cached module but otherwise does not load from a file, + that atypical scenario may be appropriate. + + It is **strongly** recommended that you use + "module.__spec__.cached" instead of "module.__cached__". + + Deprecated since version 3.13, will be removed in version 3.15: + Setting "__cached__" on a module while failing to set + "__spec__.cached" is deprecated. In Python 3.15, "__cached__" will + cease to be set or taken into consideration by the import system or + standard library. + + +Other writable attributes on module objects +------------------------------------------- + +As well as the import-related attributes listed above, module objects +also have the following writable attributes: + +module.__doc__ + + The module’s documentation string, or "None" if unavailable. See + also: "__doc__ attributes". + +module.__annotations__ + + A dictionary containing *variable annotations* collected during + module body execution. For best practices on working with + "__annotations__", see "annotationlib". + + Changed in version 3.14: Annotations are now lazily evaluated. See + **PEP 649**. + +module.__annotate__ + + The *annotate function* for this module, or "None" if the module + has no annotations. See also: "__annotate__" attributes. + + Added in version 3.14. + + +Module dictionaries +------------------- + +Module objects also have the following special read-only attribute: + +module.__dict__ + + The module’s namespace as a dictionary object. 
Uniquely among the + attributes listed here, "__dict__" cannot be accessed as a global + variable from within a module; it can only be accessed as an + attribute on module objects. + + **CPython implementation detail:** Because of the way CPython + clears module dictionaries, the module dictionary will be cleared + when the module falls out of scope even if the dictionary still has + live references. To avoid this, copy the dictionary or keep the + module around while using its dictionary directly. + + +Custom classes +============== + +Custom class types are typically created by class definitions (see +section Class definitions). A class has a namespace implemented by a +dictionary object. Class attribute references are translated to +lookups in this dictionary, e.g., "C.x" is translated to +"C.__dict__["x"]" (although there are a number of hooks which allow +for other means of locating attributes). When the attribute name is +not found there, the attribute search continues in the base classes. +This search of the base classes uses the C3 method resolution order +which behaves correctly even in the presence of ‘diamond’ inheritance +structures where there are multiple inheritance paths leading back to +a common ancestor. Additional details on the C3 MRO used by Python can +be found at The Python 2.3 Method Resolution Order. + +When a class attribute reference (for class "C", say) would yield a +class method object, it is transformed into an instance method object +whose "__self__" attribute is "C". When it would yield a +"staticmethod" object, it is transformed into the object wrapped by +the static method object. See section Implementing Descriptors for +another way in which attributes retrieved from a class may differ from +those actually contained in its "__dict__". + +Class attribute assignments update the class’s dictionary, never the +dictionary of a base class. + +A class object can be called (see above) to yield a class instance +(see below). + + +Special attributes +------------------ + ++----------------------------------------------------+----------------------------------------------------+ +| Attribute | Meaning | +|====================================================|====================================================| +| type.__name__ | The class’s name. See also: "__name__ attributes". | ++----------------------------------------------------+----------------------------------------------------+ +| type.__qualname__ | The class’s *qualified name*. See also: | +| | "__qualname__ attributes". | ++----------------------------------------------------+----------------------------------------------------+ +| type.__module__ | The name of the module in which the class was | +| | defined. | ++----------------------------------------------------+----------------------------------------------------+ +| type.__dict__ | A "mapping proxy" providing a read-only view of | +| | the class’s namespace. See also: "__dict__ | +| | attributes". | ++----------------------------------------------------+----------------------------------------------------+ +| type.__bases__ | A "tuple" containing the class’s bases. In most | +| | cases, for a class defined as "class X(A, B, C)", | +| | "X.__bases__" will be exactly equal to "(A, B, | +| | C)". | ++----------------------------------------------------+----------------------------------------------------+ +| type.__doc__ | The class’s documentation string, or "None" if | +| | undefined. Not inherited by subclasses. 
| ++----------------------------------------------------+----------------------------------------------------+ +| type.__annotations__ | A dictionary containing *variable annotations* | +| | collected during class body execution. See also: | +| | "__annotations__ attributes". For best practices | +| | on working with "__annotations__", please see | +| | "annotationlib". Caution: Accessing the | +| | "__annotations__" attribute of a class object | +| | directly may yield incorrect results in the | +| | presence of metaclasses. In addition, the | +| | attribute may not exist for some classes. Use | +| | "annotationlib.get_annotations()" to retrieve | +| | class annotations safely. Changed in version | +| | 3.14: Annotations are now lazily evaluated. See | +| | **PEP 649**. | ++----------------------------------------------------+----------------------------------------------------+ +| type.__annotate__() | The *annotate function* for this class, or "None" | +| | if the class has no annotations. See also: | +| | "__annotate__ attributes". Caution: Accessing | +| | the "__annotate__" attribute of a class object | +| | directly may yield incorrect results in the | +| | presence of metaclasses. Use | +| | "annotationlib.get_annotate_function()" to | +| | retrieve the annotate function safely. Added in | +| | version 3.14. | ++----------------------------------------------------+----------------------------------------------------+ +| type.__type_params__ | A "tuple" containing the type parameters of a | +| | generic class. Added in version 3.12. | ++----------------------------------------------------+----------------------------------------------------+ +| type.__static_attributes__ | A "tuple" containing names of attributes of this | +| | class which are assigned through "self.X" from any | +| | function in its body. Added in version 3.13. | ++----------------------------------------------------+----------------------------------------------------+ +| type.__firstlineno__ | The line number of the first line of the class | +| | definition, including decorators. Setting the | +| | "__module__" attribute removes the | +| | "__firstlineno__" item from the type’s dictionary. | +| | Added in version 3.13. | ++----------------------------------------------------+----------------------------------------------------+ +| type.__mro__ | The "tuple" of classes that are considered when | +| | looking for base classes during method resolution. | ++----------------------------------------------------+----------------------------------------------------+ + + +Special methods +--------------- + +In addition to the special attributes described above, all Python +classes also have the following two methods available: + +type.mro() + + This method can be overridden by a metaclass to customize the + method resolution order for its instances. It is called at class + instantiation, and its result is stored in "__mro__". + +type.__subclasses__() + + Each class keeps a list of weak references to its immediate + subclasses. This method returns a list of all those references + still alive. The list is in definition order. Example: + + >>> class A: pass + >>> class B(A): pass + >>> A.__subclasses__() + [] + + +Class instances +=============== + +A class instance is created by calling a class object (see above). A +class instance has a namespace implemented as a dictionary which is +the first place in which attribute references are searched. 
When an +attribute is not found there, and the instance’s class has an +attribute by that name, the search continues with the class +attributes. If a class attribute is found that is a user-defined +function object, it is transformed into an instance method object +whose "__self__" attribute is the instance. Static method and class +method objects are also transformed; see above under “Classesâ€. See +section Implementing Descriptors for another way in which attributes +of a class retrieved via its instances may differ from the objects +actually stored in the class’s "__dict__". If no class attribute is +found, and the object’s class has a "__getattr__()" method, that is +called to satisfy the lookup. + +Attribute assignments and deletions update the instance’s dictionary, +never a class’s dictionary. If the class has a "__setattr__()" or +"__delattr__()" method, this is called instead of updating the +instance dictionary directly. + +Class instances can pretend to be numbers, sequences, or mappings if +they have methods with certain special names. See section Special +method names. + + +Special attributes +------------------ + +object.__class__ + + The class to which a class instance belongs. + +object.__dict__ + + A dictionary or other mapping object used to store an object’s + (writable) attributes. Not all instances have a "__dict__" + attribute; see the section on __slots__ for more details. + + +I/O objects (also known as file objects) +======================================== + +A *file object* represents an open file. Various shortcuts are +available to create file objects: the "open()" built-in function, and +also "os.popen()", "os.fdopen()", and the "makefile()" method of +socket objects (and perhaps by other functions or methods provided by +extension modules). + +The objects "sys.stdin", "sys.stdout" and "sys.stderr" are initialized +to file objects corresponding to the interpreter’s standard input, +output and error streams; they are all open in text mode and therefore +follow the interface defined by the "io.TextIOBase" abstract class. + + +Internal types +============== + +A few types used internally by the interpreter are exposed to the +user. Their definitions may change with future versions of the +interpreter, but they are mentioned here for completeness. + + +Code objects +------------ + +Code objects represent *byte-compiled* executable Python code, or +*bytecode*. The difference between a code object and a function object +is that the function object contains an explicit reference to the +function’s globals (the module in which it was defined), while a code +object contains no context; also the default argument values are +stored in the function object, not in the code object (because they +represent values calculated at run-time). Unlike function objects, +code objects are immutable and contain no references (directly or +indirectly) to mutable objects. + + +Special read-only attributes +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + ++----------------------------------------------------+----------------------------------------------------+ +| codeobject.co_name | The function name | ++----------------------------------------------------+----------------------------------------------------+ +| codeobject.co_qualname | The fully qualified function name Added in | +| | version 3.11. 
| ++----------------------------------------------------+----------------------------------------------------+ +| codeobject.co_argcount | The total number of positional *parameters* | +| | (including positional-only parameters and | +| | parameters with default values) that the function | +| | has | ++----------------------------------------------------+----------------------------------------------------+ +| codeobject.co_posonlyargcount | The number of positional-only *parameters* | +| | (including arguments with default values) that the | +| | function has | ++----------------------------------------------------+----------------------------------------------------+ +| codeobject.co_kwonlyargcount | The number of keyword-only *parameters* (including | +| | arguments with default values) that the function | +| | has | ++----------------------------------------------------+----------------------------------------------------+ +| codeobject.co_nlocals | The number of local variables used by the function | +| | (including parameters) | ++----------------------------------------------------+----------------------------------------------------+ +| codeobject.co_varnames | A "tuple" containing the names of the local | +| | variables in the function (starting with the | +| | parameter names) | ++----------------------------------------------------+----------------------------------------------------+ +| codeobject.co_cellvars | A "tuple" containing the names of local variables | +| | that are referenced from at least one *nested | +| | scope* inside the function | ++----------------------------------------------------+----------------------------------------------------+ +| codeobject.co_freevars | A "tuple" containing the names of *free (closure) | +| | variables* that a *nested scope* references in an | +| | outer scope. See also "function.__closure__". | +| | Note: references to global and builtin names are | +| | *not* included. | ++----------------------------------------------------+----------------------------------------------------+ +| codeobject.co_code | A string representing the sequence of *bytecode* | +| | instructions in the function | ++----------------------------------------------------+----------------------------------------------------+ +| codeobject.co_consts | A "tuple" containing the literals used by the | +| | *bytecode* in the function | ++----------------------------------------------------+----------------------------------------------------+ +| codeobject.co_names | A "tuple" containing the names used by the | +| | *bytecode* in the function | ++----------------------------------------------------+----------------------------------------------------+ +| codeobject.co_filename | The name of the file from which the code was | +| | compiled | ++----------------------------------------------------+----------------------------------------------------+ +| codeobject.co_firstlineno | The line number of the first line of the function | ++----------------------------------------------------+----------------------------------------------------+ +| codeobject.co_lnotab | A string encoding the mapping from *bytecode* | +| | offsets to line numbers. For details, see the | +| | source code of the interpreter. Deprecated since | +| | version 3.12: This attribute of code objects is | +| | deprecated, and may be removed in Python 3.15. 
| ++----------------------------------------------------+----------------------------------------------------+ +| codeobject.co_stacksize | The required stack size of the code object | ++----------------------------------------------------+----------------------------------------------------+ +| codeobject.co_flags | An "integer" encoding a number of flags for the | +| | interpreter. | ++----------------------------------------------------+----------------------------------------------------+ + +The following flag bits are defined for "co_flags": bit "0x04" is set +if the function uses the "*arguments" syntax to accept an arbitrary +number of positional arguments; bit "0x08" is set if the function uses +the "**keywords" syntax to accept arbitrary keyword arguments; bit +"0x20" is set if the function is a generator. See Code Objects Bit +Flags for details on the semantics of each flags that might be +present. + +Future feature declarations ("from __future__ import division") also +use bits in "co_flags" to indicate whether a code object was compiled +with a particular feature enabled: bit "0x2000" is set if the function +was compiled with future division enabled; bits "0x10" and "0x1000" +were used in earlier versions of Python. + +Other bits in "co_flags" are reserved for internal use. + +If a code object represents a function and has a docstring, the first +item in "co_consts" is the docstring of the function. + + +Methods on code objects +~~~~~~~~~~~~~~~~~~~~~~~ + +codeobject.co_positions() + + Returns an iterable over the source code positions of each + *bytecode* instruction in the code object. + + The iterator returns "tuple"s containing the "(start_line, + end_line, start_column, end_column)". The *i-th* tuple corresponds + to the position of the source code that compiled to the *i-th* code + unit. Column information is 0-indexed utf-8 byte offsets on the + given source line. + + This positional information can be missing. A non-exhaustive lists + of cases where this may happen: + + * Running the interpreter with "-X" "no_debug_ranges". + + * Loading a pyc file compiled while using "-X" "no_debug_ranges". + + * Position tuples corresponding to artificial instructions. + + * Line and column numbers that can’t be represented due to + implementation specific limitations. + + When this occurs, some or all of the tuple elements can be "None". + + Added in version 3.11. + + Note: + + This feature requires storing column positions in code objects + which may result in a small increase of disk usage of compiled + Python files or interpreter memory usage. To avoid storing the + extra information and/or deactivate printing the extra traceback + information, the "-X" "no_debug_ranges" command line flag or the + "PYTHONNODEBUGRANGES" environment variable can be used. + +codeobject.co_lines() + + Returns an iterator that yields information about successive ranges + of *bytecode*s. Each item yielded is a "(start, end, lineno)" + "tuple": + + * "start" (an "int") represents the offset (inclusive) of the start + of the *bytecode* range + + * "end" (an "int") represents the offset (exclusive) of the end of + the *bytecode* range + + * "lineno" is an "int" representing the line number of the + *bytecode* range, or "None" if the bytecodes in the given range + have no line number + + The items yielded will have the following properties: + + * The first range yielded will have a "start" of 0. + + * The "(start, end)" ranges will be non-decreasing and consecutive. 
+ That is, for any pair of "tuple"s, the "start" of the second will + be equal to the "end" of the first. + + * No range will be backwards: "end >= start" for all triples. + + * The last "tuple" yielded will have "end" equal to the size of the + *bytecode*. + + Zero-width ranges, where "start == end", are allowed. Zero-width + ranges are used for lines that are present in the source code, but + have been eliminated by the *bytecode* compiler. + + Added in version 3.10. + + See also: + + **PEP 626** - Precise line numbers for debugging and other tools. + The PEP that introduced the "co_lines()" method. + +codeobject.replace(**kwargs) + + Return a copy of the code object with new values for the specified + fields. + + Code objects are also supported by the generic function + "copy.replace()". + + Added in version 3.8. + + +Frame objects +------------- + +Frame objects represent execution frames. They may occur in traceback +objects, and are also passed to registered trace functions. + + +Special read-only attributes +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + ++----------------------------------------------------+----------------------------------------------------+ +| frame.f_back | Points to the previous stack frame (towards the | +| | caller), or "None" if this is the bottom stack | +| | frame | ++----------------------------------------------------+----------------------------------------------------+ +| frame.f_code | The code object being executed in this frame. | +| | Accessing this attribute raises an auditing event | +| | "object.__getattr__" with arguments "obj" and | +| | ""f_code"". | ++----------------------------------------------------+----------------------------------------------------+ +| frame.f_locals | The mapping used by the frame to look up local | +| | variables. If the frame refers to an *optimized | +| | scope*, this may return a write-through proxy | +| | object. Changed in version 3.13: Return a proxy | +| | for optimized scopes. | ++----------------------------------------------------+----------------------------------------------------+ +| frame.f_globals | The dictionary used by the frame to look up global | +| | variables | ++----------------------------------------------------+----------------------------------------------------+ +| frame.f_builtins | The dictionary used by the frame to look up built- | +| | in (intrinsic) names | ++----------------------------------------------------+----------------------------------------------------+ +| frame.f_lasti | The “precise instruction†of the frame object | +| | (this is an index into the *bytecode* string of | +| | the code object) | ++----------------------------------------------------+----------------------------------------------------+ + + +Special writable attributes +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + ++----------------------------------------------------+----------------------------------------------------+ +| frame.f_trace | If not "None", this is a function called for | +| | various events during code execution (this is used | +| | by debuggers). Normally an event is triggered for | +| | each new source line (see "f_trace_lines"). | ++----------------------------------------------------+----------------------------------------------------+ +| frame.f_trace_lines | Set this attribute to "False" to disable | +| | triggering a tracing event for each source line. 
| ++----------------------------------------------------+----------------------------------------------------+ +| frame.f_trace_opcodes | Set this attribute to "True" to allow per-opcode | +| | events to be requested. Note that this may lead to | +| | undefined interpreter behaviour if exceptions | +| | raised by the trace function escape to the | +| | function being traced. | ++----------------------------------------------------+----------------------------------------------------+ +| frame.f_lineno | The current line number of the frame – writing to | +| | this from within a trace function jumps to the | +| | given line (only for the bottom-most frame). A | +| | debugger can implement a Jump command (aka Set | +| | Next Statement) by writing to this attribute. | ++----------------------------------------------------+----------------------------------------------------+ + + +Frame object methods +~~~~~~~~~~~~~~~~~~~~ + +Frame objects support one method: + +frame.clear() + + This method clears all references to local variables held by the + frame. Also, if the frame belonged to a *generator*, the generator + is finalized. This helps break reference cycles involving frame + objects (for example when catching an exception and storing its + traceback for later use). + + "RuntimeError" is raised if the frame is currently executing or + suspended. + + Added in version 3.4. + + Changed in version 3.13: Attempting to clear a suspended frame + raises "RuntimeError" (as has always been the case for executing + frames). + + +Traceback objects +----------------- + +Traceback objects represent the stack trace of an exception. A +traceback object is implicitly created when an exception occurs, and +may also be explicitly created by calling "types.TracebackType". + +Changed in version 3.7: Traceback objects can now be explicitly +instantiated from Python code. + +For implicitly created tracebacks, when the search for an exception +handler unwinds the execution stack, at each unwound level a traceback +object is inserted in front of the current traceback. When an +exception handler is entered, the stack trace is made available to the +program. (See section The try statement.) It is accessible as the +third item of the tuple returned by "sys.exc_info()", and as the +"__traceback__" attribute of the caught exception. + +When the program contains no suitable handler, the stack trace is +written (nicely formatted) to the standard error stream; if the +interpreter is interactive, it is also made available to the user as +"sys.last_traceback". + +For explicitly created tracebacks, it is up to the creator of the +traceback to determine how the "tb_next" attributes should be linked +to form a full stack trace. + +Special read-only attributes: + ++----------------------------------------------------+----------------------------------------------------+ +| traceback.tb_frame | Points to the execution frame of the current | +| | level. Accessing this attribute raises an | +| | auditing event "object.__getattr__" with arguments | +| | "obj" and ""tb_frame"". | ++----------------------------------------------------+----------------------------------------------------+ +| traceback.tb_lineno | Gives the line number where the exception occurred | ++----------------------------------------------------+----------------------------------------------------+ +| traceback.tb_lasti | Indicates the “precise instructionâ€. 
| ++----------------------------------------------------+----------------------------------------------------+ + +The line number and last instruction in the traceback may differ from +the line number of its frame object if the exception occurred in a +"try" statement with no matching except clause or with a "finally" +clause. + +traceback.tb_next + + The special writable attribute "tb_next" is the next level in the + stack trace (towards the frame where the exception occurred), or + "None" if there is no next level. + + Changed in version 3.7: This attribute is now writable + + +Slice objects +------------- + +Slice objects are used to represent slices for "__getitem__()" +methods. They are also created by the built-in "slice()" function. + +Special read-only attributes: "start" is the lower bound; "stop" is +the upper bound; "step" is the step value; each is "None" if omitted. +These attributes can have any type. + +Slice objects support one method: + +slice.indices(self, length) + + This method takes a single integer argument *length* and computes + information about the slice that the slice object would describe if + applied to a sequence of *length* items. It returns a tuple of + three integers; respectively these are the *start* and *stop* + indices and the *step* or stride length of the slice. Missing or + out-of-bounds indices are handled in a manner consistent with + regular slices. + + +Static method objects +--------------------- + +Static method objects provide a way of defeating the transformation of +function objects to method objects described above. A static method +object is a wrapper around any other object, usually a user-defined +method object. When a static method object is retrieved from a class +or a class instance, the object actually returned is the wrapped +object, which is not subject to any further transformation. Static +method objects are also callable. Static method objects are created by +the built-in "staticmethod()" constructor. + + +Class method objects +-------------------- + +A class method object, like a static method object, is a wrapper +around another object that alters the way in which that object is +retrieved from classes and class instances. The behaviour of class +method objects upon such retrieval is described above, under “instance +methodsâ€. Class method objects are created by the built-in +"classmethod()" constructor. +''', + 'typesfunctions': r'''Functions +********* + +Function objects are created by function definitions. The only +operation on a function object is to call it: "func(argument-list)". + +There are really two flavors of function objects: built-in functions +and user-defined functions. Both support the same operation (to call +the function), but the implementation is different, hence the +different object types. + +See Function definitions for more information. +''', + 'typesmapping': r'''Mapping Types — "dict" +********************** + +A *mapping* object maps *hashable* values to arbitrary objects. +Mappings are mutable objects. There is currently only one standard +mapping type, the *dictionary*. (For other containers see the built- +in "list", "set", and "tuple" classes, and the "collections" module.) + +A dictionary’s keys are *almost* arbitrary values. Values that are +not *hashable*, that is, values containing lists, dictionaries or +other mutable types (that are compared by value rather than by object +identity) may not be used as keys. 
Values that compare equal (such as +"1", "1.0", and "True") can be used interchangeably to index the same +dictionary entry. + +class dict(**kwargs) +class dict(mapping, **kwargs) +class dict(iterable, **kwargs) + + Return a new dictionary initialized from an optional positional + argument and a possibly empty set of keyword arguments. + + Dictionaries can be created by several means: + + * Use a comma-separated list of "key: value" pairs within braces: + "{'jack': 4098, 'sjoerd': 4127}" or "{4098: 'jack', 4127: + 'sjoerd'}" + + * Use a dict comprehension: "{}", "{x: x ** 2 for x in range(10)}" + + * Use the type constructor: "dict()", "dict([('foo', 100), ('bar', + 200)])", "dict(foo=100, bar=200)" + + If no positional argument is given, an empty dictionary is created. + If a positional argument is given and it defines a "keys()" method, + a dictionary is created by calling "__getitem__()" on the argument + with each returned key from the method. Otherwise, the positional + argument must be an *iterable* object. Each item in the iterable + must itself be an iterable with exactly two elements. The first + element of each item becomes a key in the new dictionary, and the + second element the corresponding value. If a key occurs more than + once, the last value for that key becomes the corresponding value + in the new dictionary. + + If keyword arguments are given, the keyword arguments and their + values are added to the dictionary created from the positional + argument. If a key being added is already present, the value from + the keyword argument replaces the value from the positional + argument. + + To illustrate, the following examples all return a dictionary equal + to "{"one": 1, "two": 2, "three": 3}": + + >>> a = dict(one=1, two=2, three=3) + >>> b = {'one': 1, 'two': 2, 'three': 3} + >>> c = dict(zip(['one', 'two', 'three'], [1, 2, 3])) + >>> d = dict([('two', 2), ('one', 1), ('three', 3)]) + >>> e = dict({'three': 3, 'one': 1, 'two': 2}) + >>> f = dict({'one': 1, 'three': 3}, two=2) + >>> a == b == c == d == e == f + True + + Providing keyword arguments as in the first example only works for + keys that are valid Python identifiers. Otherwise, any valid keys + can be used. + + These are the operations that dictionaries support (and therefore, + custom mapping types should support too): + + list(d) + + Return a list of all the keys used in the dictionary *d*. + + len(d) + + Return the number of items in the dictionary *d*. + + d[key] + + Return the item of *d* with key *key*. Raises a "KeyError" if + *key* is not in the map. + + If a subclass of dict defines a method "__missing__()" and *key* + is not present, the "d[key]" operation calls that method with + the key *key* as argument. The "d[key]" operation then returns + or raises whatever is returned or raised by the + "__missing__(key)" call. No other operations or methods invoke + "__missing__()". If "__missing__()" is not defined, "KeyError" + is raised. "__missing__()" must be a method; it cannot be an + instance variable: + + >>> class Counter(dict): + ... def __missing__(self, key): + ... return 0 + ... + >>> c = Counter() + >>> c['red'] + 0 + >>> c['red'] += 1 + >>> c['red'] + 1 + + The example above shows part of the implementation of + "collections.Counter". A different "__missing__" method is used + by "collections.defaultdict". + + d[key] = value + + Set "d[key]" to *value*. + + del d[key] + + Remove "d[key]" from *d*. Raises a "KeyError" if *key* is not + in the map. 
+ + key in d + + Return "True" if *d* has a key *key*, else "False". + + key not in d + + Equivalent to "not key in d". + + iter(d) + + Return an iterator over the keys of the dictionary. This is a + shortcut for "iter(d.keys())". + + clear() + + Remove all items from the dictionary. + + copy() + + Return a shallow copy of the dictionary. + + classmethod fromkeys(iterable, value=None, /) + + Create a new dictionary with keys from *iterable* and values set + to *value*. + + "fromkeys()" is a class method that returns a new dictionary. + *value* defaults to "None". All of the values refer to just a + single instance, so it generally doesn’t make sense for *value* + to be a mutable object such as an empty list. To get distinct + values, use a dict comprehension instead. + + get(key, default=None) + + Return the value for *key* if *key* is in the dictionary, else + *default*. If *default* is not given, it defaults to "None", so + that this method never raises a "KeyError". + + items() + + Return a new view of the dictionary’s items ("(key, value)" + pairs). See the documentation of view objects. + + keys() + + Return a new view of the dictionary’s keys. See the + documentation of view objects. + + pop(key[, default]) + + If *key* is in the dictionary, remove it and return its value, + else return *default*. If *default* is not given and *key* is + not in the dictionary, a "KeyError" is raised. + + popitem() + + Remove and return a "(key, value)" pair from the dictionary. + Pairs are returned in LIFO (last-in, first-out) order. + + "popitem()" is useful to destructively iterate over a + dictionary, as often used in set algorithms. If the dictionary + is empty, calling "popitem()" raises a "KeyError". + + Changed in version 3.7: LIFO order is now guaranteed. In prior + versions, "popitem()" would return an arbitrary key/value pair. + + reversed(d) + + Return a reverse iterator over the keys of the dictionary. This + is a shortcut for "reversed(d.keys())". + + Added in version 3.8. + + setdefault(key, default=None) + + If *key* is in the dictionary, return its value. If not, insert + *key* with a value of *default* and return *default*. *default* + defaults to "None". + + update([other]) + + Update the dictionary with the key/value pairs from *other*, + overwriting existing keys. Return "None". + + "update()" accepts either another object with a "keys()" method + (in which case "__getitem__()" is called with every key returned + from the method) or an iterable of key/value pairs (as tuples or + other iterables of length two). If keyword arguments are + specified, the dictionary is then updated with those key/value + pairs: "d.update(red=1, blue=2)". + + values() + + Return a new view of the dictionary’s values. See the + documentation of view objects. + + An equality comparison between one "dict.values()" view and + another will always return "False". This also applies when + comparing "dict.values()" to itself: + + >>> d = {'a': 1} + >>> d.values() == d.values() + False + + d | other + + Create a new dictionary with the merged keys and values of *d* + and *other*, which must both be dictionaries. The values of + *other* take priority when *d* and *other* share keys. + + Added in version 3.9. + + d |= other + + Update the dictionary *d* with keys and values from *other*, + which may be either a *mapping* or an *iterable* of key/value + pairs. The values of *other* take priority when *d* and *other* + share keys. + + Added in version 3.9. 
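+
+   For illustration, a short session (the dictionaries are arbitrary
+   examples) shows the merge ("|") and update ("|=") operators, where
+   the values of *other* take priority for shared keys:
+
+   >>> base = {'a': 1, 'b': 2}
+   >>> extra = {'b': 20, 'c': 30}
+   >>> base | extra
+   {'a': 1, 'b': 20, 'c': 30}
+   >>> base |= extra
+   >>> base
+   {'a': 1, 'b': 20, 'c': 30}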
+ + Dictionaries compare equal if and only if they have the same "(key, + value)" pairs (regardless of ordering). Order comparisons (‘<’, + ‘<=’, ‘>=’, ‘>’) raise "TypeError". + + Dictionaries preserve insertion order. Note that updating a key + does not affect the order. Keys added after deletion are inserted + at the end. + + >>> d = {"one": 1, "two": 2, "three": 3, "four": 4} + >>> d + {'one': 1, 'two': 2, 'three': 3, 'four': 4} + >>> list(d) + ['one', 'two', 'three', 'four'] + >>> list(d.values()) + [1, 2, 3, 4] + >>> d["one"] = 42 + >>> d + {'one': 42, 'two': 2, 'three': 3, 'four': 4} + >>> del d["two"] + >>> d["two"] = None + >>> d + {'one': 42, 'three': 3, 'four': 4, 'two': None} + + Changed in version 3.7: Dictionary order is guaranteed to be + insertion order. This behavior was an implementation detail of + CPython from 3.6. + + Dictionaries and dictionary views are reversible. + + >>> d = {"one": 1, "two": 2, "three": 3, "four": 4} + >>> d + {'one': 1, 'two': 2, 'three': 3, 'four': 4} + >>> list(reversed(d)) + ['four', 'three', 'two', 'one'] + >>> list(reversed(d.values())) + [4, 3, 2, 1] + >>> list(reversed(d.items())) + [('four', 4), ('three', 3), ('two', 2), ('one', 1)] + + Changed in version 3.8: Dictionaries are now reversible. + +See also: + + "types.MappingProxyType" can be used to create a read-only view of a + "dict". + + +Dictionary view objects +======================= + +The objects returned by "dict.keys()", "dict.values()" and +"dict.items()" are *view objects*. They provide a dynamic view on the +dictionary’s entries, which means that when the dictionary changes, +the view reflects these changes. + +Dictionary views can be iterated over to yield their respective data, +and support membership tests: + +len(dictview) + + Return the number of entries in the dictionary. + +iter(dictview) + + Return an iterator over the keys, values or items (represented as + tuples of "(key, value)") in the dictionary. + + Keys and values are iterated over in insertion order. This allows + the creation of "(value, key)" pairs using "zip()": "pairs = + zip(d.values(), d.keys())". Another way to create the same list is + "pairs = [(v, k) for (k, v) in d.items()]". + + Iterating views while adding or deleting entries in the dictionary + may raise a "RuntimeError" or fail to iterate over all entries. + + Changed in version 3.7: Dictionary order is guaranteed to be + insertion order. + +x in dictview + + Return "True" if *x* is in the underlying dictionary’s keys, values + or items (in the latter case, *x* should be a "(key, value)" + tuple). + +reversed(dictview) + + Return a reverse iterator over the keys, values or items of the + dictionary. The view will be iterated in reverse order of the + insertion. + + Changed in version 3.8: Dictionary views are now reversible. + +dictview.mapping + + Return a "types.MappingProxyType" that wraps the original + dictionary to which the view refers. + + Added in version 3.10. + +Keys views are set-like since their entries are unique and *hashable*. +Items views also have set-like operations since the (key, value) pairs +are unique and the keys are hashable. If all values in an items view +are hashable as well, then the items view can interoperate with other +sets. (Values views are not treated as set-like since the entries are +generally not unique.) For set-like views, all of the operations +defined for the abstract base class "collections.abc.Set" are +available (for example, "==", "<", or "^"). 
While using set +operators, set-like views accept any iterable as the other operand, +unlike sets which only accept sets as the input. + +An example of dictionary view usage: + + >>> dishes = {'eggs': 2, 'sausage': 1, 'bacon': 1, 'spam': 500} + >>> keys = dishes.keys() + >>> values = dishes.values() + + >>> # iteration + >>> n = 0 + >>> for val in values: + ... n += val + ... + >>> print(n) + 504 + + >>> # keys and values are iterated over in the same order (insertion order) + >>> list(keys) + ['eggs', 'sausage', 'bacon', 'spam'] + >>> list(values) + [2, 1, 1, 500] + + >>> # view objects are dynamic and reflect dict changes + >>> del dishes['eggs'] + >>> del dishes['sausage'] + >>> list(keys) + ['bacon', 'spam'] + + >>> # set operations + >>> keys & {'eggs', 'bacon', 'salad'} + {'bacon'} + >>> keys ^ {'sausage', 'juice'} == {'juice', 'sausage', 'bacon', 'spam'} + True + >>> keys | ['juice', 'juice', 'juice'] == {'bacon', 'spam', 'juice'} + True + + >>> # get back a read-only proxy for the original dictionary + >>> values.mapping + mappingproxy({'bacon': 1, 'spam': 500}) + >>> values.mapping['spam'] + 500 +''', + 'typesmethods': r'''Methods +******* + +Methods are functions that are called using the attribute notation. +There are two flavors: built-in methods (such as "append()" on lists) +and class instance method. Built-in methods are described with the +types that support them. + +If you access a method (a function defined in a class namespace) +through an instance, you get a special object: a *bound method* (also +called instance method) object. When called, it will add the "self" +argument to the argument list. Bound methods have two special read- +only attributes: "m.__self__" is the object on which the method +operates, and "m.__func__" is the function implementing the method. +Calling "m(arg-1, arg-2, ..., arg-n)" is completely equivalent to +calling "m.__func__(m.__self__, arg-1, arg-2, ..., arg-n)". + +Like function objects, bound method objects support getting arbitrary +attributes. However, since method attributes are actually stored on +the underlying function object ("method.__func__"), setting method +attributes on bound methods is disallowed. Attempting to set an +attribute on a method results in an "AttributeError" being raised. In +order to set a method attribute, you need to explicitly set it on the +underlying function object: + + >>> class C: + ... def method(self): + ... pass + ... + >>> c = C() + >>> c.method.whoami = 'my name is method' # can't set on the method + Traceback (most recent call last): + File "", line 1, in + AttributeError: 'method' object has no attribute 'whoami' + >>> c.method.__func__.whoami = 'my name is method' + >>> c.method.whoami + 'my name is method' + +See Instance methods for more information. +''', + 'typesmodules': r'''Modules +******* + +The only special operation on a module is attribute access: "m.name", +where *m* is a module and *name* accesses a name defined in *m*’s +symbol table. Module attributes can be assigned to. (Note that the +"import" statement is not, strictly speaking, an operation on a module +object; "import foo" does not require a module object named *foo* to +exist, rather it requires an (external) *definition* for a module +named *foo* somewhere.) + +A special attribute of every module is "__dict__". This is the +dictionary containing the module’s symbol table. 
Modifying this
+dictionary will actually change the module’s symbol table, but direct
+assignment to the "__dict__" attribute is not possible (you can write
+"m.__dict__['a'] = 1", which defines "m.a" to be "1", but you can’t
+write "m.__dict__ = {}"). Modifying "__dict__" directly is not
+recommended.
+
+Modules built into the interpreter are written like this: "<module
+'sys' (built-in)>". If loaded from a file, they are written as
+"<module 'os' from '/usr/local/lib/pythonX.Y/os.pyc'>".
+''',
+ 'typesseq': r'''Sequence Types — "list", "tuple", "range"
+*****************************************
+
+There are three basic sequence types: lists, tuples, and range
+objects. Additional sequence types tailored for processing of binary
+data and text strings are described in dedicated sections.
+
+
+Common Sequence Operations
+==========================
+
+The operations in the following table are supported by most sequence
+types, both mutable and immutable. The "collections.abc.Sequence" ABC
+is provided to make it easier to correctly implement these operations
+on custom sequence types.
+
+This table lists the sequence operations sorted in ascending priority.
+In the table, *s* and *t* are sequences of the same type, *n*, *i*,
+*j* and *k* are integers and *x* is an arbitrary object that meets any
+type and value restrictions imposed by *s*.
+
+The "in" and "not in" operations have the same priorities as the
+comparison operations. The "+" (concatenation) and "*" (repetition)
+operations have the same priority as the corresponding numeric
+operations. [3]
+
++----------------------------+----------------------------------+------------+
+| Operation                  | Result                           | Notes      |
+|============================|==================================|============|
+| "x in s"                   | "True" if an item of *s* is      | (1)        |
+|                            | equal to *x*, else "False"       |            |
++----------------------------+----------------------------------+------------+
+| "x not in s"               | "False" if an item of *s* is     | (1)        |
+|                            | equal to *x*, else "True"        |            |
++----------------------------+----------------------------------+------------+
+| "s + t"                    | the concatenation of *s* and *t* | (6)(7)     |
++----------------------------+----------------------------------+------------+
+| "s * n" or "n * s"         | equivalent to adding *s* to      | (2)(7)     |
+|                            | itself *n* times                 |            |
++----------------------------+----------------------------------+------------+
+| "s[i]"                     | *i*th item of *s*, origin 0      | (3)        |
++----------------------------+----------------------------------+------------+
+| "s[i:j]"                   | slice of *s* from *i* to *j*     | (3)(4)     |
++----------------------------+----------------------------------+------------+
+| "s[i:j:k]"                 | slice of *s* from *i* to *j*     | (3)(5)     |
+|                            | with step *k*                    |            |
++----------------------------+----------------------------------+------------+
+| "len(s)"                   | length of *s*                    |            |
++----------------------------+----------------------------------+------------+
+| "min(s)"                   | smallest item of *s*             |            |
++----------------------------+----------------------------------+------------+
+| "max(s)"                   | largest item of *s*              |            |
++----------------------------+----------------------------------+------------+
+| "s.index(x[, i[, j]])"     | index of the first occurrence of | (8)        |
+|                            | *x* in *s* (at or after index    |            |
+|                            | *i* and before index *j*)        |            |
++----------------------------+----------------------------------+------------+
+| "s.count(x)"               | total number of occurrences of   |            |
+|                            | *x* in *s*                       |            |
++----------------------------+----------------------------------+------------+
+
+Sequences of the same type also support comparisons.
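(Editorial illustration, not part of the patch: lexicographic comparison of same-type sequences.)

   >>> (1, 2, 3) < (1, 2, 4)
   True
   >>> [1, 2, 3] == [1.0, 2.0, 3.0]
   True
   >>> 'abc' < 'abd' < 'abda'
   True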
In particular, +tuples and lists are compared lexicographically by comparing +corresponding elements. This means that to compare equal, every +element must compare equal and the two sequences must be of the same +type and have the same length. (For full details see Comparisons in +the language reference.) + +Forward and reversed iterators over mutable sequences access values +using an index. That index will continue to march forward (or +backward) even if the underlying sequence is mutated. The iterator +terminates only when an "IndexError" or a "StopIteration" is +encountered (or when the index drops below zero). + +Notes: + +1. While the "in" and "not in" operations are used only for simple + containment testing in the general case, some specialised sequences + (such as "str", "bytes" and "bytearray") also use them for + subsequence testing: + + >>> "gg" in "eggs" + True + +2. Values of *n* less than "0" are treated as "0" (which yields an + empty sequence of the same type as *s*). Note that items in the + sequence *s* are not copied; they are referenced multiple times. + This often haunts new Python programmers; consider: + + >>> lists = [[]] * 3 + >>> lists + [[], [], []] + >>> lists[0].append(3) + >>> lists + [[3], [3], [3]] + + What has happened is that "[[]]" is a one-element list containing + an empty list, so all three elements of "[[]] * 3" are references + to this single empty list. Modifying any of the elements of + "lists" modifies this single list. You can create a list of + different lists this way: + + >>> lists = [[] for i in range(3)] + >>> lists[0].append(3) + >>> lists[1].append(5) + >>> lists[2].append(7) + >>> lists + [[3], [5], [7]] + + Further explanation is available in the FAQ entry How do I create a + multidimensional list?. + +3. If *i* or *j* is negative, the index is relative to the end of + sequence *s*: "len(s) + i" or "len(s) + j" is substituted. But + note that "-0" is still "0". + +4. The slice of *s* from *i* to *j* is defined as the sequence of + items with index *k* such that "i <= k < j". If *i* or *j* is + greater than "len(s)", use "len(s)". If *i* is omitted or "None", + use "0". If *j* is omitted or "None", use "len(s)". If *i* is + greater than or equal to *j*, the slice is empty. + +5. The slice of *s* from *i* to *j* with step *k* is defined as the + sequence of items with index "x = i + n*k" such that "0 <= n < + (j-i)/k". In other words, the indices are "i", "i+k", "i+2*k", + "i+3*k" and so on, stopping when *j* is reached (but never + including *j*). When *k* is positive, *i* and *j* are reduced to + "len(s)" if they are greater. When *k* is negative, *i* and *j* are + reduced to "len(s) - 1" if they are greater. If *i* or *j* are + omitted or "None", they become “end†values (which end depends on + the sign of *k*). Note, *k* cannot be zero. If *k* is "None", it + is treated like "1". + +6. Concatenating immutable sequences always results in a new object. + This means that building up a sequence by repeated concatenation + will have a quadratic runtime cost in the total sequence length. + To get a linear runtime cost, you must switch to one of the + alternatives below: + + * if concatenating "str" objects, you can build a list and use + "str.join()" at the end or else write to an "io.StringIO" + instance and retrieve its value when complete + + * if concatenating "bytes" objects, you can similarly use + "bytes.join()" or "io.BytesIO", or you can do in-place + concatenation with a "bytearray" object. 
"bytearray" objects are + mutable and have an efficient overallocation mechanism + + * if concatenating "tuple" objects, extend a "list" instead + + * for other types, investigate the relevant class documentation + +7. Some sequence types (such as "range") only support item sequences + that follow specific patterns, and hence don’t support sequence + concatenation or repetition. + +8. "index" raises "ValueError" when *x* is not found in *s*. Not all + implementations support passing the additional arguments *i* and + *j*. These arguments allow efficient searching of subsections of + the sequence. Passing the extra arguments is roughly equivalent to + using "s[i:j].index(x)", only without copying any data and with the + returned index being relative to the start of the sequence rather + than the start of the slice. + + +Immutable Sequence Types +======================== + +The only operation that immutable sequence types generally implement +that is not also implemented by mutable sequence types is support for +the "hash()" built-in. + +This support allows immutable sequences, such as "tuple" instances, to +be used as "dict" keys and stored in "set" and "frozenset" instances. + +Attempting to hash an immutable sequence that contains unhashable +values will result in "TypeError". + + +Mutable Sequence Types +====================== + +The operations in the following table are defined on mutable sequence +types. The "collections.abc.MutableSequence" ABC is provided to make +it easier to correctly implement these operations on custom sequence +types. + +In the table *s* is an instance of a mutable sequence type, *t* is any +iterable object and *x* is an arbitrary object that meets any type and +value restrictions imposed by *s* (for example, "bytearray" only +accepts integers that meet the value restriction "0 <= x <= 255"). 
+ ++--------------------------------+----------------------------------+-----------------------+ +| Operation | Result | Notes | +|================================|==================================|=======================| +| "s[i] = x" | item *i* of *s* is replaced by | | +| | *x* | | ++--------------------------------+----------------------------------+-----------------------+ +| "s[i:j] = t" | slice of *s* from *i* to *j* is | | +| | replaced by the contents of the | | +| | iterable *t* | | ++--------------------------------+----------------------------------+-----------------------+ +| "del s[i:j]" | same as "s[i:j] = []" | | ++--------------------------------+----------------------------------+-----------------------+ +| "s[i:j:k] = t" | the elements of "s[i:j:k]" are | (1) | +| | replaced by those of *t* | | ++--------------------------------+----------------------------------+-----------------------+ +| "del s[i:j:k]" | removes the elements of | | +| | "s[i:j:k]" from the list | | ++--------------------------------+----------------------------------+-----------------------+ +| "s.append(x)" | appends *x* to the end of the | | +| | sequence (same as | | +| | "s[len(s):len(s)] = [x]") | | ++--------------------------------+----------------------------------+-----------------------+ +| "s.clear()" | removes all items from *s* (same | (5) | +| | as "del s[:]") | | ++--------------------------------+----------------------------------+-----------------------+ +| "s.copy()" | creates a shallow copy of *s* | (5) | +| | (same as "s[:]") | | ++--------------------------------+----------------------------------+-----------------------+ +| "s.extend(t)" or "s += t" | extends *s* with the contents of | | +| | *t* (for the most part the same | | +| | as "s[len(s):len(s)] = t") | | ++--------------------------------+----------------------------------+-----------------------+ +| "s *= n" | updates *s* with its contents | (6) | +| | repeated *n* times | | ++--------------------------------+----------------------------------+-----------------------+ +| "s.insert(i, x)" | inserts *x* into *s* at the | | +| | index given by *i* (same as | | +| | "s[i:i] = [x]") | | ++--------------------------------+----------------------------------+-----------------------+ +| "s.pop()" or "s.pop(i)" | retrieves the item at *i* and | (2) | +| | also removes it from *s* | | ++--------------------------------+----------------------------------+-----------------------+ +| "s.remove(x)" | removes the first item from *s* | (3) | +| | where "s[i]" is equal to *x* | | ++--------------------------------+----------------------------------+-----------------------+ +| "s.reverse()" | reverses the items of *s* in | (4) | +| | place | | ++--------------------------------+----------------------------------+-----------------------+ + +Notes: + +1. If *k* is not equal to "1", *t* must have the same length as the + slice it is replacing. + +2. The optional argument *i* defaults to "-1", so that by default the + last item is removed and returned. + +3. "remove()" raises "ValueError" when *x* is not found in *s*. + +4. The "reverse()" method modifies the sequence in place for economy + of space when reversing a large sequence. To remind users that it + operates by side effect, it does not return the reversed sequence. + +5. "clear()" and "copy()" are included for consistency with the + interfaces of mutable containers that don’t support slicing + operations (such as "dict" and "set"). 
"copy()" is not part of the + "collections.abc.MutableSequence" ABC, but most concrete mutable + sequence classes provide it. + + Added in version 3.3: "clear()" and "copy()" methods. + +6. The value *n* is an integer, or an object implementing + "__index__()". Zero and negative values of *n* clear the sequence. + Items in the sequence are not copied; they are referenced multiple + times, as explained for "s * n" under Common Sequence Operations. + + +Lists +===== + +Lists are mutable sequences, typically used to store collections of +homogeneous items (where the precise degree of similarity will vary by +application). + +class list([iterable]) + + Lists may be constructed in several ways: + + * Using a pair of square brackets to denote the empty list: "[]" + + * Using square brackets, separating items with commas: "[a]", "[a, + b, c]" + + * Using a list comprehension: "[x for x in iterable]" + + * Using the type constructor: "list()" or "list(iterable)" + + The constructor builds a list whose items are the same and in the + same order as *iterable*’s items. *iterable* may be either a + sequence, a container that supports iteration, or an iterator + object. If *iterable* is already a list, a copy is made and + returned, similar to "iterable[:]". For example, "list('abc')" + returns "['a', 'b', 'c']" and "list( (1, 2, 3) )" returns "[1, 2, + 3]". If no argument is given, the constructor creates a new empty + list, "[]". + + Many other operations also produce lists, including the "sorted()" + built-in. + + Lists implement all of the common and mutable sequence operations. + Lists also provide the following additional method: + + sort(*, key=None, reverse=False) + + This method sorts the list in place, using only "<" comparisons + between items. Exceptions are not suppressed - if any comparison + operations fail, the entire sort operation will fail (and the + list will likely be left in a partially modified state). + + "sort()" accepts two arguments that can only be passed by + keyword (keyword-only arguments): + + *key* specifies a function of one argument that is used to + extract a comparison key from each list element (for example, + "key=str.lower"). The key corresponding to each item in the list + is calculated once and then used for the entire sorting process. + The default value of "None" means that list items are sorted + directly without calculating a separate key value. + + The "functools.cmp_to_key()" utility is available to convert a + 2.x style *cmp* function to a *key* function. + + *reverse* is a boolean value. If set to "True", then the list + elements are sorted as if each comparison were reversed. + + This method modifies the sequence in place for economy of space + when sorting a large sequence. To remind users that it operates + by side effect, it does not return the sorted sequence (use + "sorted()" to explicitly request a new sorted list instance). + + The "sort()" method is guaranteed to be stable. A sort is + stable if it guarantees not to change the relative order of + elements that compare equal — this is helpful for sorting in + multiple passes (for example, sort by department, then by salary + grade). + + For sorting examples and a brief sorting tutorial, see Sorting + Techniques. + + **CPython implementation detail:** While a list is being sorted, + the effect of attempting to mutate, or even inspect, the list is + undefined. 
The C implementation of Python makes the list appear + empty for the duration, and raises "ValueError" if it can detect + that the list has been mutated during a sort. + + +Tuples +====== + +Tuples are immutable sequences, typically used to store collections of +heterogeneous data (such as the 2-tuples produced by the "enumerate()" +built-in). Tuples are also used for cases where an immutable sequence +of homogeneous data is needed (such as allowing storage in a "set" or +"dict" instance). + +class tuple([iterable]) + + Tuples may be constructed in a number of ways: + + * Using a pair of parentheses to denote the empty tuple: "()" + + * Using a trailing comma for a singleton tuple: "a," or "(a,)" + + * Separating items with commas: "a, b, c" or "(a, b, c)" + + * Using the "tuple()" built-in: "tuple()" or "tuple(iterable)" + + The constructor builds a tuple whose items are the same and in the + same order as *iterable*’s items. *iterable* may be either a + sequence, a container that supports iteration, or an iterator + object. If *iterable* is already a tuple, it is returned + unchanged. For example, "tuple('abc')" returns "('a', 'b', 'c')" + and "tuple( [1, 2, 3] )" returns "(1, 2, 3)". If no argument is + given, the constructor creates a new empty tuple, "()". + + Note that it is actually the comma which makes a tuple, not the + parentheses. The parentheses are optional, except in the empty + tuple case, or when they are needed to avoid syntactic ambiguity. + For example, "f(a, b, c)" is a function call with three arguments, + while "f((a, b, c))" is a function call with a 3-tuple as the sole + argument. + + Tuples implement all of the common sequence operations. + +For heterogeneous collections of data where access by name is clearer +than access by index, "collections.namedtuple()" may be a more +appropriate choice than a simple tuple object. + + +Ranges +====== + +The "range" type represents an immutable sequence of numbers and is +commonly used for looping a specific number of times in "for" loops. + +class range(stop) +class range(start, stop[, step]) + + The arguments to the range constructor must be integers (either + built-in "int" or any object that implements the "__index__()" + special method). If the *step* argument is omitted, it defaults to + "1". If the *start* argument is omitted, it defaults to "0". If + *step* is zero, "ValueError" is raised. + + For a positive *step*, the contents of a range "r" are determined + by the formula "r[i] = start + step*i" where "i >= 0" and "r[i] < + stop". + + For a negative *step*, the contents of the range are still + determined by the formula "r[i] = start + step*i", but the + constraints are "i >= 0" and "r[i] > stop". + + A range object will be empty if "r[0]" does not meet the value + constraint. Ranges do support negative indices, but these are + interpreted as indexing from the end of the sequence determined by + the positive indices. + + Ranges containing absolute values larger than "sys.maxsize" are + permitted but some features (such as "len()") may raise + "OverflowError". 
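(Editorial illustration of that limitation, not part of the original text.)

   >>> big = range(2 ** 100)
   >>> big[-1]
   1267650600228229401496703205375
   >>> len(big)
   Traceback (most recent call last):
     ...
   OverflowError: Python int too large to convert to C ssize_t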
+ + Range examples: + + >>> list(range(10)) + [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] + >>> list(range(1, 11)) + [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] + >>> list(range(0, 30, 5)) + [0, 5, 10, 15, 20, 25] + >>> list(range(0, 10, 3)) + [0, 3, 6, 9] + >>> list(range(0, -10, -1)) + [0, -1, -2, -3, -4, -5, -6, -7, -8, -9] + >>> list(range(0)) + [] + >>> list(range(1, 0)) + [] + + Ranges implement all of the common sequence operations except + concatenation and repetition (due to the fact that range objects + can only represent sequences that follow a strict pattern and + repetition and concatenation will usually violate that pattern). + + start + + The value of the *start* parameter (or "0" if the parameter was + not supplied) + + stop + + The value of the *stop* parameter + + step + + The value of the *step* parameter (or "1" if the parameter was + not supplied) + +The advantage of the "range" type over a regular "list" or "tuple" is +that a "range" object will always take the same (small) amount of +memory, no matter the size of the range it represents (as it only +stores the "start", "stop" and "step" values, calculating individual +items and subranges as needed). + +Range objects implement the "collections.abc.Sequence" ABC, and +provide features such as containment tests, element index lookup, +slicing and support for negative indices (see Sequence Types — list, +tuple, range): + +>>> r = range(0, 20, 2) +>>> r +range(0, 20, 2) +>>> 11 in r +False +>>> 10 in r +True +>>> r.index(10) +5 +>>> r[5] +10 +>>> r[:5] +range(0, 10, 2) +>>> r[-1] +18 + +Testing range objects for equality with "==" and "!=" compares them as +sequences. That is, two range objects are considered equal if they +represent the same sequence of values. (Note that two range objects +that compare equal might have different "start", "stop" and "step" +attributes, for example "range(0) == range(2, 1, 3)" or "range(0, 3, +2) == range(0, 4, 2)".) + +Changed in version 3.2: Implement the Sequence ABC. Support slicing +and negative indices. Test "int" objects for membership in constant +time instead of iterating through all items. + +Changed in version 3.3: Define ‘==’ and ‘!=’ to compare range objects +based on the sequence of values they define (instead of comparing +based on object identity).Added the "start", "stop" and "step" +attributes. + +See also: + + * The linspace recipe shows how to implement a lazy version of range + suitable for floating-point applications. +''', + 'typesseq-mutable': r'''Mutable Sequence Types +********************** + +The operations in the following table are defined on mutable sequence +types. The "collections.abc.MutableSequence" ABC is provided to make +it easier to correctly implement these operations on custom sequence +types. + +In the table *s* is an instance of a mutable sequence type, *t* is any +iterable object and *x* is an arbitrary object that meets any type and +value restrictions imposed by *s* (for example, "bytearray" only +accepts integers that meet the value restriction "0 <= x <= 255"). 
+ ++--------------------------------+----------------------------------+-----------------------+ +| Operation | Result | Notes | +|================================|==================================|=======================| +| "s[i] = x" | item *i* of *s* is replaced by | | +| | *x* | | ++--------------------------------+----------------------------------+-----------------------+ +| "s[i:j] = t" | slice of *s* from *i* to *j* is | | +| | replaced by the contents of the | | +| | iterable *t* | | ++--------------------------------+----------------------------------+-----------------------+ +| "del s[i:j]" | same as "s[i:j] = []" | | ++--------------------------------+----------------------------------+-----------------------+ +| "s[i:j:k] = t" | the elements of "s[i:j:k]" are | (1) | +| | replaced by those of *t* | | ++--------------------------------+----------------------------------+-----------------------+ +| "del s[i:j:k]" | removes the elements of | | +| | "s[i:j:k]" from the list | | ++--------------------------------+----------------------------------+-----------------------+ +| "s.append(x)" | appends *x* to the end of the | | +| | sequence (same as | | +| | "s[len(s):len(s)] = [x]") | | ++--------------------------------+----------------------------------+-----------------------+ +| "s.clear()" | removes all items from *s* (same | (5) | +| | as "del s[:]") | | ++--------------------------------+----------------------------------+-----------------------+ +| "s.copy()" | creates a shallow copy of *s* | (5) | +| | (same as "s[:]") | | ++--------------------------------+----------------------------------+-----------------------+ +| "s.extend(t)" or "s += t" | extends *s* with the contents of | | +| | *t* (for the most part the same | | +| | as "s[len(s):len(s)] = t") | | ++--------------------------------+----------------------------------+-----------------------+ +| "s *= n" | updates *s* with its contents | (6) | +| | repeated *n* times | | ++--------------------------------+----------------------------------+-----------------------+ +| "s.insert(i, x)" | inserts *x* into *s* at the | | +| | index given by *i* (same as | | +| | "s[i:i] = [x]") | | ++--------------------------------+----------------------------------+-----------------------+ +| "s.pop()" or "s.pop(i)" | retrieves the item at *i* and | (2) | +| | also removes it from *s* | | ++--------------------------------+----------------------------------+-----------------------+ +| "s.remove(x)" | removes the first item from *s* | (3) | +| | where "s[i]" is equal to *x* | | ++--------------------------------+----------------------------------+-----------------------+ +| "s.reverse()" | reverses the items of *s* in | (4) | +| | place | | ++--------------------------------+----------------------------------+-----------------------+ + +Notes: + +1. If *k* is not equal to "1", *t* must have the same length as the + slice it is replacing. + +2. The optional argument *i* defaults to "-1", so that by default the + last item is removed and returned. + +3. "remove()" raises "ValueError" when *x* is not found in *s*. + +4. The "reverse()" method modifies the sequence in place for economy + of space when reversing a large sequence. To remind users that it + operates by side effect, it does not return the reversed sequence. + +5. "clear()" and "copy()" are included for consistency with the + interfaces of mutable containers that don’t support slicing + operations (such as "dict" and "set"). 
"copy()" is not part of the + "collections.abc.MutableSequence" ABC, but most concrete mutable + sequence classes provide it. + + Added in version 3.3: "clear()" and "copy()" methods. + +6. The value *n* is an integer, or an object implementing + "__index__()". Zero and negative values of *n* clear the sequence. + Items in the sequence are not copied; they are referenced multiple + times, as explained for "s * n" under Common Sequence Operations. +''', + 'unary': r'''Unary arithmetic and bitwise operations +*************************************** + +All unary arithmetic and bitwise operations have the same priority: + + u_expr ::= power | "-" u_expr | "+" u_expr | "~" u_expr + +The unary "-" (minus) operator yields the negation of its numeric +argument; the operation can be overridden with the "__neg__()" special +method. + +The unary "+" (plus) operator yields its numeric argument unchanged; +the operation can be overridden with the "__pos__()" special method. + +The unary "~" (invert) operator yields the bitwise inversion of its +integer argument. The bitwise inversion of "x" is defined as +"-(x+1)". It only applies to integral numbers or to custom objects +that override the "__invert__()" special method. + +In all three cases, if the argument does not have the proper type, a +"TypeError" exception is raised. +''', + 'while': r'''The "while" statement +********************* + +The "while" statement is used for repeated execution as long as an +expression is true: + + while_stmt ::= "while" assignment_expression ":" suite + ["else" ":" suite] + +This repeatedly tests the expression and, if it is true, executes the +first suite; if the expression is false (which may be the first time +it is tested) the suite of the "else" clause, if present, is executed +and the loop terminates. + +A "break" statement executed in the first suite terminates the loop +without executing the "else" clause’s suite. A "continue" statement +executed in the first suite skips the rest of the suite and goes back +to testing the expression. +''', + 'with': r'''The "with" statement +******************** + +The "with" statement is used to wrap the execution of a block with +methods defined by a context manager (see section With Statement +Context Managers). This allows common "try"…"except"…"finally" usage +patterns to be encapsulated for convenient reuse. + + with_stmt ::= "with" ( "(" with_stmt_contents ","? ")" | with_stmt_contents ) ":" suite + with_stmt_contents ::= with_item ("," with_item)* + with_item ::= expression ["as" target] + +The execution of the "with" statement with one “item†proceeds as +follows: + +1. The context expression (the expression given in the "with_item") is + evaluated to obtain a context manager. + +2. The context manager’s "__enter__()" is loaded for later use. + +3. The context manager’s "__exit__()" is loaded for later use. + +4. The context manager’s "__enter__()" method is invoked. + +5. If a target was included in the "with" statement, the return value + from "__enter__()" is assigned to it. + + Note: + + The "with" statement guarantees that if the "__enter__()" method + returns without an error, then "__exit__()" will always be + called. Thus, if an error occurs during the assignment to the + target list, it will be treated the same as an error occurring + within the suite would be. See step 7 below. + +6. The suite is executed. + +7. The context manager’s "__exit__()" method is invoked. 
If an
+   exception caused the suite to be exited, its type, value, and
+   traceback are passed as arguments to "__exit__()". Otherwise, three
+   "None" arguments are supplied.
+
+   If the suite was exited due to an exception, and the return value
+   from the "__exit__()" method was false, the exception is reraised.
+   If the return value was true, the exception is suppressed, and
+   execution continues with the statement following the "with"
+   statement.
+
+   If the suite was exited for any reason other than an exception, the
+   return value from "__exit__()" is ignored, and execution proceeds
+   at the normal location for the kind of exit that was taken.
+
+The following code:
+
+   with EXPRESSION as TARGET:
+       SUITE
+
+is semantically equivalent to:
+
+   manager = (EXPRESSION)
+   enter = type(manager).__enter__
+   exit = type(manager).__exit__
+   value = enter(manager)
+   hit_except = False
+
+   try:
+       TARGET = value
+       SUITE
+   except:
+       hit_except = True
+       if not exit(manager, *sys.exc_info()):
+           raise
+   finally:
+       if not hit_except:
+           exit(manager, None, None, None)
+
+With more than one item, the context managers are processed as if
+multiple "with" statements were nested:
+
+   with A() as a, B() as b:
+       SUITE
+
+is semantically equivalent to:
+
+   with A() as a:
+       with B() as b:
+           SUITE
+
+You can also write multi-item context managers in multiple lines if
+the items are surrounded by parentheses. For example:
+
+   with (
+       A() as a,
+       B() as b,
+   ):
+       SUITE
+
+Changed in version 3.1: Support for multiple context expressions.
+
+Changed in version 3.10: Support for using grouping parentheses to
+break the statement in multiple lines.
+
+See also:
+
+  **PEP 343** - The “with” statement
+     The specification, background, and examples for the Python "with"
+     statement.
+''',
+ 'yield': r'''The "yield" statement
+*********************
+
+   yield_stmt ::= yield_expression
+
+A "yield" statement is semantically equivalent to a yield expression.
+The "yield" statement can be used to omit the parentheses that would
+otherwise be required in the equivalent yield expression statement.
+For example, the yield statements
+
+   yield <expr>
+   yield from <expr>
+
+are equivalent to the yield expression statements
+
+   (yield <expr>)
+   (yield from <expr>)
+
+Yield expressions and statements are only used when defining a
+*generator* function, and are only used in the body of the generator
+function. Using "yield" in a function definition is sufficient to
+cause that definition to create a generator function instead of a
+normal function.
+
+For full details of "yield" semantics, refer to the Yield expressions
+section.
+''',
+}
diff --git a/Lib/shutil.py b/Lib/shutil.py
index 171489ca41f2a7..510ae8c6f22d59 100644
--- a/Lib/shutil.py
+++ b/Lib/shutil.py
@@ -49,6 +49,7 @@
 # https://bugs.python.org/issue43743#msg393429
 _USE_CP_SENDFILE = (hasattr(os, "sendfile")
                     and sys.platform.startswith(("linux", "android", "sunos")))
+_USE_CP_COPY_FILE_RANGE = hasattr(os, "copy_file_range")
 _HAS_FCOPYFILE = posix and hasattr(posix, "_fcopyfile")  # macOS

 # CMD defaults in Windows 10
@@ -107,6 +108,66 @@ def _fastcopy_fcopyfile(fsrc, fdst, flags):
         else:
             raise err from None

+def _determine_linux_fastcopy_blocksize(infd):
+    """Determine blocksize for fastcopying on Linux.
+
+    Hopefully the whole file will be copied in a single call.
+    The copying itself should be performed in a loop 'till EOF is
+    reached (0 return) so a blocksize smaller or bigger than the actual
+    file size should not make any difference, also in case the file
+    content changes while being copied.
+ """ + try: + blocksize = max(os.fstat(infd).st_size, 2 ** 23) # min 8 MiB + except OSError: + blocksize = 2 ** 27 # 128 MiB + # On 32-bit architectures truncate to 1 GiB to avoid OverflowError, + # see gh-82500. + if sys.maxsize < 2 ** 32: + blocksize = min(blocksize, 2 ** 30) + return blocksize + +def _fastcopy_copy_file_range(fsrc, fdst): + """Copy data from one regular mmap-like fd to another by using + a high-performance copy_file_range(2) syscall that gives filesystems + an opportunity to implement the use of reflinks or server-side copy. + + This should work on Linux >= 4.5 only. + """ + try: + infd = fsrc.fileno() + outfd = fdst.fileno() + except Exception as err: + raise _GiveupOnFastCopy(err) # not a regular file + + blocksize = _determine_linux_fastcopy_blocksize(infd) + offset = 0 + while True: + try: + n_copied = os.copy_file_range(infd, outfd, blocksize, offset_dst=offset) + except OSError as err: + # ...in oder to have a more informative exception. + err.filename = fsrc.name + err.filename2 = fdst.name + + if err.errno == errno.ENOSPC: # filesystem is full + raise err from None + + # Give up on first call and if no data was copied. + if offset == 0 and os.lseek(outfd, 0, os.SEEK_CUR) == 0: + raise _GiveupOnFastCopy(err) + + raise err + else: + if n_copied == 0: + # If no bytes have been copied yet, copy_file_range + # might silently fail. + # https://lore.kernel.org/linux-fsdevel/20210126233840.GG4626@dread.disaster.area/T/#m05753578c7f7882f6e9ffe01f981bc223edef2b0 + if offset == 0: + raise _GiveupOnFastCopy() + break + offset += n_copied + def _fastcopy_sendfile(fsrc, fdst): """Copy data from one regular mmap-like fd to another by using high-performance sendfile(2) syscall. @@ -128,20 +189,7 @@ def _fastcopy_sendfile(fsrc, fdst): except Exception as err: raise _GiveupOnFastCopy(err) # not a regular file - # Hopefully the whole file will be copied in a single call. - # sendfile() is called in a loop 'till EOF is reached (0 return) - # so a bufsize smaller or bigger than the actual file size - # should not make any difference, also in case the file content - # changes while being copied. - try: - blocksize = max(os.fstat(infd).st_size, 2 ** 23) # min 8MiB - except OSError: - blocksize = 2 ** 27 # 128MiB - # On 32-bit architectures truncate to 1GiB to avoid OverflowError, - # see bpo-38319. - if sys.maxsize < 2 ** 32: - blocksize = min(blocksize, 2 ** 30) - + blocksize = _determine_linux_fastcopy_blocksize(infd) offset = 0 while True: try: @@ -266,12 +314,20 @@ def copyfile(src, dst, *, follow_symlinks=True): except _GiveupOnFastCopy: pass # Linux / Android / Solaris - elif _USE_CP_SENDFILE: - try: - _fastcopy_sendfile(fsrc, fdst) - return dst - except _GiveupOnFastCopy: - pass + elif _USE_CP_SENDFILE or _USE_CP_COPY_FILE_RANGE: + # reflink may be implicit in copy_file_range. 
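+        # Editorial note: copy_file_range() is tried first because the
+        # kernel may service it with a reflink or a server-side copy;
+        # if it gives up, sendfile() is attempted, and if that also
+        # gives up, copyfile() falls back to the generic copyfileobj()
+        # path.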
+ if _USE_CP_COPY_FILE_RANGE: + try: + _fastcopy_copy_file_range(fsrc, fdst) + return dst + except _GiveupOnFastCopy: + pass + if _USE_CP_SENDFILE: + try: + _fastcopy_sendfile(fsrc, fdst) + return dst + except _GiveupOnFastCopy: + pass # Windows, see: # https://github.com/python/cpython/pull/7160#discussion_r195405230 elif _WINDOWS and file_size > 0: diff --git a/Lib/site.py b/Lib/site.py index 92bd1ccdadd924..9da8b6724e1cec 100644 --- a/Lib/site.py +++ b/Lib/site.py @@ -633,12 +633,9 @@ def venv(known_paths): # Doing this here ensures venv takes precedence over user-site addsitepackages(known_paths, [sys.prefix]) - # addsitepackages will process site_prefix again if its in PREFIXES, - # but that's ok; known_paths will prevent anything being added twice if system_site == "true": - PREFIXES.insert(0, sys.prefix) + PREFIXES += [sys.base_prefix, sys.base_exec_prefix] else: - PREFIXES = [sys.prefix] ENABLE_USER_SITE = False return known_paths diff --git a/Lib/socket.py b/Lib/socket.py index be37c24d6174a2..727b0e75f03595 100644 --- a/Lib/socket.py +++ b/Lib/socket.py @@ -937,7 +937,9 @@ def create_server(address, *, family=AF_INET, backlog=None, reuse_port=False, # Fail later on bind(), for platforms which may not # support this option. pass - if reuse_port: + # Since Linux 6.12.9, SO_REUSEPORT is not allowed + # on other address families than AF_INET/AF_INET6. + if reuse_port and family in (AF_INET, AF_INET6): sock.setsockopt(SOL_SOCKET, SO_REUSEPORT, 1) if has_ipv6 and family == AF_INET6: if dualstack_ipv6: diff --git a/Lib/socketserver.py b/Lib/socketserver.py index cd028ef1c63b85..35b2723de3babe 100644 --- a/Lib/socketserver.py +++ b/Lib/socketserver.py @@ -468,7 +468,12 @@ def server_bind(self): """ if self.allow_reuse_address and hasattr(socket, "SO_REUSEADDR"): self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - if self.allow_reuse_port and hasattr(socket, "SO_REUSEPORT"): + # Since Linux 6.12.9, SO_REUSEPORT is not allowed + # on other address families than AF_INET/AF_INET6. + if ( + self.allow_reuse_port and hasattr(socket, "SO_REUSEPORT") + and self.address_family in (socket.AF_INET, socket.AF_INET6) + ): self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) self.socket.bind(self.server_address) self.server_address = self.socket.getsockname() diff --git a/Lib/sqlite3/__init__.py b/Lib/sqlite3/__init__.py index 34a9c047dd607c..ed727fae609d1d 100644 --- a/Lib/sqlite3/__init__.py +++ b/Lib/sqlite3/__init__.py @@ -22,7 +22,7 @@ """ The sqlite3 extension module provides a DB-API 2.0 (PEP 249) compliant -interface to the SQLite library, and requires SQLite 3.7.15 or newer. +interface to the SQLite library, and requires SQLite 3.15.2 or newer. To use the module, start by creating a database Connection object: diff --git a/Lib/string.py b/Lib/string.py index 2eab6d4f595c4e..c4f05c7223ce8a 100644 --- a/Lib/string.py +++ b/Lib/string.py @@ -212,19 +212,20 @@ def _vformat(self, format_string, args, kwargs, used_args, recursion_depth, # this is some markup, find the object and do # the formatting - # handle arg indexing when empty field_names are given. - if field_name == '': + # handle arg indexing when empty field first parts are given. 
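+                    # Editorial note: formatter_field_name_split() returns the
+                    # first component of the field name: an int for explicit
+                    # positional fields such as '0.attr', a str for named fields
+                    # such as 'name[0]', and '' when the field relies on
+                    # automatic numbering (e.g. '', '.attr' or '[0]').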
+ field_first, _ = _string.formatter_field_name_split(field_name) + if field_first == '': if auto_arg_index is False: raise ValueError('cannot switch from manual field ' 'specification to automatic field ' 'numbering') - field_name = str(auto_arg_index) + field_name = str(auto_arg_index) + field_name auto_arg_index += 1 - elif field_name.isdigit(): + elif isinstance(field_first, int): if auto_arg_index: - raise ValueError('cannot switch from manual field ' - 'specification to automatic field ' - 'numbering') + raise ValueError('cannot switch from automatic field ' + 'numbering to manual field ' + 'specification') # disable auto arg incrementing, if it gets # used later on, then an exception will be raised auto_arg_index = False diff --git a/Lib/subprocess.py b/Lib/subprocess.py index de88eedcf80ff9..2044d2a42897e9 100644 --- a/Lib/subprocess.py +++ b/Lib/subprocess.py @@ -43,10 +43,8 @@ import builtins import errno import io -import locale import os import time -import signal import sys import threading import warnings @@ -144,6 +142,8 @@ def __init__(self, returncode, cmd, output=None, stderr=None): def __str__(self): if self.returncode and self.returncode < 0: + # Lazy import to improve module import time + import signal try: return "Command '%s' died with %r." % ( self.cmd, signal.Signals(-self.returncode)) @@ -381,6 +381,8 @@ def _text_encoding(): if sys.flags.utf8_mode: return "utf-8" else: + # Lazy import to improve module import time + import locale return locale.getencoding() @@ -1664,6 +1666,9 @@ def send_signal(self, sig): # Don't signal a process that we know has already died. if self.returncode is not None: return + + # Lazy import to improve module import time + import signal if sig == signal.SIGTERM: self.terminate() elif sig == signal.CTRL_C_EVENT: @@ -1765,6 +1770,9 @@ def _posix_spawn(self, args, executable, env, restore_signals, close_fds, """Execute program using os.posix_spawn().""" kwargs = {} if restore_signals: + # Lazy import to improve module import time + import signal + # See _Py_RestoreSignals() in Python/pylifecycle.c sigset = [] for signame in ('SIGPIPE', 'SIGXFZ', 'SIGXFSZ'): @@ -2214,9 +2222,13 @@ def send_signal(self, sig): def terminate(self): """Terminate the process with SIGTERM """ + # Lazy import to improve module import time + import signal self.send_signal(signal.SIGTERM) def kill(self): """Kill the process with SIGKILL """ + # Lazy import to improve module import time + import signal self.send_signal(signal.SIGKILL) diff --git a/Lib/sysconfig/__init__.py b/Lib/sysconfig/__init__.py index ed7b6a335d01d4..69f72452c4069a 100644 --- a/Lib/sysconfig/__init__.py +++ b/Lib/sysconfig/__init__.py @@ -116,8 +116,10 @@ def _getuserbase(): if env_base: return env_base - # Emscripten, iOS, tvOS, VxWorks, WASI, and watchOS have no home directories - if sys.platform in {"emscripten", "ios", "tvos", "vxworks", "wasi", "watchos"}: + # Emscripten, iOS, tvOS, VxWorks, WASI, and watchOS have no home directories. + # Use _PYTHON_HOST_PLATFORM to get the correct platform when cross-compiling. 
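+    # Editorial note: _PYTHON_HOST_PLATFORM begins with the OS name, so an
+    # illustrative value such as 'wasi-wasm32' reduces to 'wasi' here.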
+ system_name = os.environ.get('_PYTHON_HOST_PLATFORM', sys.platform).split('-')[0] + if system_name in {"emscripten", "ios", "tvos", "vxworks", "wasi", "watchos"}: return None def joinuser(*args): @@ -220,8 +222,15 @@ def _safe_realpath(path): def is_python_build(check_home=None): if check_home is not None: import warnings - warnings.warn("check_home argument is deprecated and ignored.", - DeprecationWarning, stacklevel=2) + warnings.warn( + ( + 'The check_home argument of sysconfig.is_python_build is ' + 'deprecated and its value is ignored. ' + 'It will be removed in Python 3.15.' + ), + DeprecationWarning, + stacklevel=2, + ) for fn in ("Setup", "Setup.local"): if os.path.isfile(os.path.join(_PROJECT_BASE, "Modules", fn)): return True @@ -335,6 +344,18 @@ def get_makefile_filename(): return os.path.join(get_path('stdlib'), config_dir_name, 'Makefile') +def _import_from_directory(path, name): + if name not in sys.modules: + import importlib.machinery + import importlib.util + + spec = importlib.machinery.PathFinder.find_spec(name, [path]) + module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) + sys.modules[name] = module + return sys.modules[name] + + def _get_sysconfigdata_name(): multiarch = getattr(sys.implementation, '_multiarch', '') return os.environ.get( @@ -342,27 +363,34 @@ def _get_sysconfigdata_name(): f'_sysconfigdata_{sys.abiflags}_{sys.platform}_{multiarch}', ) -def _init_posix(vars): - """Initialize the module as appropriate for POSIX systems.""" - # _sysconfigdata is generated at build time, see _generate_posix_vars() + +def _get_sysconfigdata(): + import importlib + name = _get_sysconfigdata_name() + path = os.environ.get('_PYTHON_SYSCONFIGDATA_PATH') + module = _import_from_directory(path, name) if path else importlib.import_module(name) - # For cross builds, the path to the target's sysconfigdata must be specified - # so it can be imported. It cannot be in PYTHONPATH, as foreign modules in - # sys.path can cause crashes when loaded by the host interpreter. - # Rely on truthiness as a valueless env variable is still an empty string. - # See OS X note in _generate_posix_vars re _sysconfigdata. 
- if (path := os.environ.get('_PYTHON_SYSCONFIGDATA_PATH')): - from importlib.machinery import FileFinder, SourceFileLoader, SOURCE_SUFFIXES - from importlib.util import module_from_spec - spec = FileFinder(path, (SourceFileLoader, SOURCE_SUFFIXES)).find_spec(name) - _temp = module_from_spec(spec) - spec.loader.exec_module(_temp) - else: - _temp = __import__(name, globals(), locals(), ['build_time_vars'], 0) - build_time_vars = _temp.build_time_vars + return module.build_time_vars + + +def _installation_is_relocated(): + """Is the Python installation running from a different prefix than what was targetted when building?""" + if os.name != 'posix': + raise NotImplementedError('sysconfig._installation_is_relocated() is currently only supported on POSIX') + + data = _get_sysconfigdata() + return ( + data['prefix'] != getattr(sys, 'base_prefix', '') + or data['exec_prefix'] != getattr(sys, 'base_exec_prefix', '') + ) + + +def _init_posix(vars): + """Initialize the module as appropriate for POSIX systems.""" # GH-126920: Make sure we don't overwrite any of the keys already set - vars.update(build_time_vars | vars) + vars.update(_get_sysconfigdata() | vars) + def _init_non_posix(vars): """Initialize the module as appropriate for NT""" @@ -485,10 +513,10 @@ def _init_config_vars(): _init_posix(_CONFIG_VARS) # If we are cross-compiling, load the prefixes from the Makefile instead. if '_PYTHON_PROJECT_BASE' in os.environ: - prefix = _CONFIG_VARS['prefix'] - exec_prefix = _CONFIG_VARS['exec_prefix'] - base_prefix = _CONFIG_VARS['prefix'] - base_exec_prefix = _CONFIG_VARS['exec_prefix'] + prefix = _CONFIG_VARS['host_prefix'] + exec_prefix = _CONFIG_VARS['host_exec_prefix'] + base_prefix = _CONFIG_VARS['host_prefix'] + base_exec_prefix = _CONFIG_VARS['host_exec_prefix'] abiflags = _CONFIG_VARS['ABIFLAGS'] # Normalized versions of prefix and exec_prefix are handy to have; @@ -616,7 +644,8 @@ def get_platform(): solaris-2.6-sun4u Windows will return one of: - win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc) + win-amd64 (64-bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc) + win-arm64 (64-bit Windows on ARM64 (aka AArch64) win32 (all others - specifically, sys.platform is returned) For other non-POSIX platforms, currently just returns 'sys.platform'. @@ -715,8 +744,20 @@ def expand_makefile_vars(s, vars): variable expansions; if 'vars' is the output of 'parse_makefile()', you're fine. Returns a variable-expanded version of 's'. """ + + import warnings + warnings.warn( + 'sysconfig.expand_makefile_vars is deprecated and will be removed in ' + 'Python 3.16. Use sysconfig.get_paths(vars=...) instead.', + DeprecationWarning, + stacklevel=2, + ) + import re + _findvar1_rx = r"\$\(([A-Za-z][A-Za-z0-9_]*)\)" + _findvar2_rx = r"\${([A-Za-z][A-Za-z0-9_]*)}" + # This algorithm does multiple expansion, so if vars['foo'] contains # "${bar}", it will expand ${foo} to ${bar}, and then expand # ${bar}... and so forth. 
This is fine as long as 'vars' comes from diff --git a/Lib/sysconfig/__main__.py b/Lib/sysconfig/__main__.py index 10728c709e1811..bc2197cfe79402 100644 --- a/Lib/sysconfig/__main__.py +++ b/Lib/sysconfig/__main__.py @@ -232,10 +232,14 @@ def _generate_posix_vars(): print(f'Written {destfile}') + install_vars = get_config_vars() + # Fix config vars to match the values after install (of the default environment) + install_vars['projectbase'] = install_vars['BINDIR'] + install_vars['srcdir'] = install_vars['LIBPL'] # Write a JSON file with the output of sysconfig.get_config_vars jsonfile = os.path.join(pybuilddir, _get_json_data_name()) with open(jsonfile, 'w') as f: - json.dump(get_config_vars(), f, indent=2) + json.dump(install_vars, f, indent=2) print(f'Written {jsonfile}') diff --git a/Lib/test/_test_eintr.py b/Lib/test/_test_eintr.py index 493932d6c6d441..0ce42276bfe3d6 100644 --- a/Lib/test/_test_eintr.py +++ b/Lib/test/_test_eintr.py @@ -91,7 +91,7 @@ class OSEINTRTest(EINTRBaseTest): """ EINTR tests for the os module. """ def new_sleep_process(self): - code = 'import time; time.sleep(%r)' % self.sleep_time + code = f'import time; time.sleep({self.sleep_time!r})' return self.subprocess(code) def _test_wait_multiple(self, wait_func): @@ -123,35 +123,46 @@ def test_waitpid(self): def test_wait4(self): self._test_wait_single(lambda pid: os.wait4(pid, 0)) - def test_read(self): + def _interrupted_reads(self): + """Make a fd which will force block on read of expected bytes.""" rd, wr = os.pipe() self.addCleanup(os.close, rd) # wr closed explicitly by parent # the payload below are smaller than PIPE_BUF, hence the writes will be # atomic - datas = [b"hello", b"world", b"spam"] + data = [b"hello", b"world", b"spam"] code = '\n'.join(( 'import os, sys, time', '', 'wr = int(sys.argv[1])', - 'datas = %r' % datas, - 'sleep_time = %r' % self.sleep_time, + f'data = {data!r}', + f'sleep_time = {self.sleep_time!r}', '', - 'for data in datas:', + 'for item in data:', ' # let the parent block on read()', ' time.sleep(sleep_time)', - ' os.write(wr, data)', + ' os.write(wr, item)', )) proc = self.subprocess(code, str(wr), pass_fds=[wr]) with kill_on_error(proc): os.close(wr) - for data in datas: - self.assertEqual(data, os.read(rd, len(data))) + for datum in data: + yield rd, datum self.assertEqual(proc.wait(), 0) + def test_read(self): + for fd, expected in self._interrupted_reads(): + self.assertEqual(expected, os.read(fd, len(expected))) + + def test_readinto(self): + for fd, expected in self._interrupted_reads(): + buffer = bytearray(len(expected)) + self.assertEqual(os.readinto(fd, buffer), len(expected)) + self.assertEqual(buffer, expected) + def test_write(self): rd, wr = os.pipe() self.addCleanup(os.close, wr) @@ -164,8 +175,8 @@ def test_write(self): 'import io, os, sys, time', '', 'rd = int(sys.argv[1])', - 'sleep_time = %r' % self.sleep_time, - 'data = b"x" * %s' % support.PIPE_MAX_SIZE, + f'sleep_time = {self.sleep_time!r}', + f'data = b"x" * {support.PIPE_MAX_SIZE}', 'data_len = len(data)', '', '# let the parent block on write()', @@ -178,8 +189,8 @@ def test_write(self): '', 'value = read_data.getvalue()', 'if value != data:', - ' raise Exception("read error: %s vs %s bytes"', - ' % (len(value), data_len))', + ' raise Exception(f"read error: {len(value)}' + ' vs {data_len} bytes")', )) proc = self.subprocess(code, str(rd), pass_fds=[rd]) @@ -202,33 +213,33 @@ def _test_recv(self, recv_func): # wr closed explicitly by parent # single-byte payload guard us against partial recv - datas = 
[b"x", b"y", b"z"] + data = [b"x", b"y", b"z"] code = '\n'.join(( 'import os, socket, sys, time', '', 'fd = int(sys.argv[1])', - 'family = %s' % int(wr.family), - 'sock_type = %s' % int(wr.type), - 'datas = %r' % datas, - 'sleep_time = %r' % self.sleep_time, + f'family = {int(wr.family)}', + f'sock_type = {int(wr.type)}', + f'data = {data!r}', + f'sleep_time = {self.sleep_time!r}', '', 'wr = socket.fromfd(fd, family, sock_type)', 'os.close(fd)', '', 'with wr:', - ' for data in datas:', + ' for item in data:', ' # let the parent block on recv()', ' time.sleep(sleep_time)', - ' wr.sendall(data)', + ' wr.sendall(item)', )) fd = wr.fileno() proc = self.subprocess(code, str(fd), pass_fds=[fd]) with kill_on_error(proc): wr.close() - for data in datas: - self.assertEqual(data, recv_func(rd, len(data))) + for item in data: + self.assertEqual(item, recv_func(rd, len(item))) self.assertEqual(proc.wait(), 0) def test_recv(self): @@ -250,10 +261,10 @@ def _test_send(self, send_func): 'import os, socket, sys, time', '', 'fd = int(sys.argv[1])', - 'family = %s' % int(rd.family), - 'sock_type = %s' % int(rd.type), - 'sleep_time = %r' % self.sleep_time, - 'data = b"xyz" * %s' % (support.SOCK_MAX_SIZE // 3), + f'family = {int(rd.family)}', + f'sock_type = {int(rd.type)}', + f'sleep_time = {self.sleep_time!r}', + f'data = b"xyz" * {support.SOCK_MAX_SIZE // 3}', 'data_len = len(data)', '', 'rd = socket.fromfd(fd, family, sock_type)', @@ -269,8 +280,8 @@ def _test_send(self, send_func): ' n += rd.recv_into(memoryview(received_data)[n:])', '', 'if received_data != data:', - ' raise Exception("recv error: %s vs %s bytes"', - ' % (len(received_data), data_len))', + ' raise Exception(f"recv error: {len(received_data)}' + ' vs {data_len} bytes")', )) fd = rd.fileno() @@ -302,9 +313,9 @@ def test_accept(self): code = '\n'.join(( 'import socket, time', '', - 'host = %r' % socket_helper.HOST, - 'port = %s' % port, - 'sleep_time = %r' % self.sleep_time, + f'host = {socket_helper.HOST!r}', + f'port = {port}', + f'sleep_time = {self.sleep_time!r}', '', '# let parent block on accept()', 'time.sleep(sleep_time)', @@ -332,15 +343,15 @@ def _test_open(self, do_open_close_reader, do_open_close_writer): os_helper.unlink(filename) try: os.mkfifo(filename) - except PermissionError as e: - self.skipTest('os.mkfifo(): %s' % e) + except PermissionError as exc: + self.skipTest(f'os.mkfifo(): {exc!r}') self.addCleanup(os_helper.unlink, filename) code = '\n'.join(( 'import os, time', '', - 'path = %a' % filename, - 'sleep_time = %r' % self.sleep_time, + f'path = {filename!a}', + f'sleep_time = {self.sleep_time!r}', '', '# let the parent block', 'time.sleep(sleep_time)', @@ -396,21 +407,20 @@ class SignalEINTRTest(EINTRBaseTest): def check_sigwait(self, wait_func): signum = signal.SIGUSR1 - pid = os.getpid() old_handler = signal.signal(signum, lambda *args: None) self.addCleanup(signal.signal, signum, old_handler) code = '\n'.join(( 'import os, time', - 'pid = %s' % os.getpid(), - 'signum = %s' % int(signum), - 'sleep_time = %r' % self.sleep_time, + f'pid = {os.getpid()}', + f'signum = {int(signum)}', + f'sleep_time = {self.sleep_time!r}', 'time.sleep(sleep_time)', 'os.kill(pid, signum)', )) - old_mask = signal.pthread_sigmask(signal.SIG_BLOCK, [signum]) + signal.pthread_sigmask(signal.SIG_BLOCK, [signum]) self.addCleanup(signal.pthread_sigmask, signal.SIG_UNBLOCK, [signum]) proc = self.subprocess(code) diff --git a/Lib/test/_test_embed_set_config.py b/Lib/test/_test_embed_set_config.py deleted file mode 100644 index 
7edb35da463aa0..00000000000000 --- a/Lib/test/_test_embed_set_config.py +++ /dev/null @@ -1,291 +0,0 @@ -# bpo-42260: Test _PyInterpreterState_GetConfigCopy() -# and _PyInterpreterState_SetConfig(). -# -# Test run in a subprocess since set_config(get_config()) -# does reset sys attributes to their state of the Python startup -# (before the site module is run). - -import _testinternalcapi -import sys -import unittest -from test import support -from test.support import MS_WINDOWS - - -MAX_HASH_SEED = 4294967295 - - -BOOL_OPTIONS = [ - 'isolated', - 'use_environment', - 'dev_mode', - 'install_signal_handlers', - 'use_hash_seed', - 'faulthandler', - 'import_time', - 'code_debug_ranges', - 'show_ref_count', - 'dump_refs', - 'malloc_stats', - 'parse_argv', - 'site_import', - 'warn_default_encoding', - 'inspect', - 'interactive', - 'parser_debug', - 'write_bytecode', - 'quiet', - 'user_site_directory', - 'configure_c_stdio', - 'buffered_stdio', - 'use_frozen_modules', - 'safe_path', - 'pathconfig_warnings', - 'module_search_paths_set', - 'skip_source_first_line', - '_install_importlib', - '_init_main', - '_is_python_build', -] -if MS_WINDOWS: - BOOL_OPTIONS.append('legacy_windows_stdio') - - -class SetConfigTests(unittest.TestCase): - def setUp(self): - self.old_config = _testinternalcapi.get_config() - self.sys_copy = dict(sys.__dict__) - - def tearDown(self): - _testinternalcapi.reset_path_config() - _testinternalcapi.set_config(self.old_config) - sys.__dict__.clear() - sys.__dict__.update(self.sys_copy) - - def set_config(self, **kwargs): - _testinternalcapi.set_config(self.old_config | kwargs) - - def check(self, **kwargs): - self.set_config(**kwargs) - for key, value in kwargs.items(): - self.assertEqual(getattr(sys, key), value, - (key, value)) - - def test_set_invalid(self): - invalid_uint = -1 - NULL = None - invalid_wstr = NULL - # PyWideStringList strings must be non-NULL - invalid_wstrlist = ["abc", NULL, "def"] - - type_tests = [] - value_tests = [ - # enum - ('_config_init', 0), - ('_config_init', 4), - # unsigned long - ("hash_seed", -1), - ("hash_seed", MAX_HASH_SEED + 1), - ] - - # int (unsigned) - int_options = [ - '_config_init', - 'bytes_warning', - 'optimization_level', - 'tracemalloc', - 'verbose', - ] - int_options.extend(BOOL_OPTIONS) - for key in int_options: - value_tests.append((key, invalid_uint)) - type_tests.append((key, "abc")) - type_tests.append((key, 2.0)) - - # wchar_t* - for key in ( - 'filesystem_encoding', - 'filesystem_errors', - 'stdio_encoding', - 'stdio_errors', - 'check_hash_pycs_mode', - 'program_name', - 'platlibdir', - # optional wstr: - # 'pythonpath_env' - # 'home' - # 'pycache_prefix' - # 'run_command' - # 'run_module' - # 'run_filename' - # 'executable' - # 'prefix' - # 'exec_prefix' - # 'base_executable' - # 'base_prefix' - # 'base_exec_prefix' - ): - value_tests.append((key, invalid_wstr)) - type_tests.append((key, b'bytes')) - type_tests.append((key, 123)) - - # PyWideStringList - for key in ( - 'orig_argv', - 'argv', - 'xoptions', - 'warnoptions', - 'module_search_paths', - ): - if key != 'xoptions': - value_tests.append((key, invalid_wstrlist)) - type_tests.append((key, 123)) - type_tests.append((key, "abc")) - type_tests.append((key, [123])) - type_tests.append((key, [b"bytes"])) - - - if MS_WINDOWS: - value_tests.append(('legacy_windows_stdio', invalid_uint)) - - for exc_type, tests in ( - (ValueError, value_tests), - (TypeError, type_tests), - ): - for key, value in tests: - config = self.old_config | {key: value} - with 
self.subTest(key=key, value=value, exc_type=exc_type): - with self.assertRaises(exc_type): - _testinternalcapi.set_config(config) - - def test_flags(self): - bool_options = set(BOOL_OPTIONS) - for sys_attr, key, value in ( - ("debug", "parser_debug", 2), - ("inspect", "inspect", 3), - ("interactive", "interactive", 4), - ("optimize", "optimization_level", 5), - ("verbose", "verbose", 6), - ("bytes_warning", "bytes_warning", 7), - ("quiet", "quiet", 8), - ("isolated", "isolated", 9), - ): - with self.subTest(sys=sys_attr, key=key, value=value): - self.set_config(**{key: value, 'parse_argv': 0}) - if key in bool_options: - self.assertEqual(getattr(sys.flags, sys_attr), int(bool(value))) - else: - self.assertEqual(getattr(sys.flags, sys_attr), value) - - self.set_config(write_bytecode=0) - self.assertEqual(sys.flags.dont_write_bytecode, True) - self.assertEqual(sys.dont_write_bytecode, True) - - self.set_config(write_bytecode=1) - self.assertEqual(sys.flags.dont_write_bytecode, False) - self.assertEqual(sys.dont_write_bytecode, False) - - self.set_config(user_site_directory=0, isolated=0) - self.assertEqual(sys.flags.no_user_site, 1) - self.set_config(user_site_directory=1, isolated=0) - self.assertEqual(sys.flags.no_user_site, 0) - - self.set_config(site_import=0) - self.assertEqual(sys.flags.no_site, 1) - self.set_config(site_import=1) - self.assertEqual(sys.flags.no_site, 0) - - self.set_config(dev_mode=0) - self.assertEqual(sys.flags.dev_mode, False) - self.set_config(dev_mode=1) - self.assertEqual(sys.flags.dev_mode, True) - - self.set_config(use_environment=0, isolated=0) - self.assertEqual(sys.flags.ignore_environment, 1) - self.set_config(use_environment=1, isolated=0) - self.assertEqual(sys.flags.ignore_environment, 0) - - self.set_config(use_hash_seed=1, hash_seed=0) - self.assertEqual(sys.flags.hash_randomization, 0) - self.set_config(use_hash_seed=0, hash_seed=0) - self.assertEqual(sys.flags.hash_randomization, 1) - self.set_config(use_hash_seed=1, hash_seed=123) - self.assertEqual(sys.flags.hash_randomization, 1) - - if support.Py_GIL_DISABLED: - self.set_config(enable_gil=-1) - self.assertEqual(sys.flags.gil, None) - self.set_config(enable_gil=0) - self.assertEqual(sys.flags.gil, 0) - self.set_config(enable_gil=1) - self.assertEqual(sys.flags.gil, 1) - else: - # Builds without Py_GIL_DISABLED don't have - # PyConfig.enable_gil. sys.flags.gil is always defined to 1, for - # consistency. 
- self.assertEqual(sys.flags.gil, 1) - - def test_options(self): - self.check(warnoptions=[]) - self.check(warnoptions=["default", "ignore"]) - - self.set_config(xoptions={}) - self.assertEqual(sys._xoptions, {}) - self.set_config(xoptions={"dev": True, "tracemalloc": "5"}) - self.assertEqual(sys._xoptions, {"dev": True, "tracemalloc": "5"}) - - def test_pathconfig(self): - self.check( - executable='executable', - prefix="prefix", - base_prefix="base_prefix", - exec_prefix="exec_prefix", - base_exec_prefix="base_exec_prefix", - platlibdir="platlibdir") - - self.set_config(base_executable="base_executable") - self.assertEqual(sys._base_executable, "base_executable") - - # When base_xxx is NULL, value is copied from xxxx - self.set_config( - executable='executable', - prefix="prefix", - exec_prefix="exec_prefix", - base_executable=None, - base_prefix=None, - base_exec_prefix=None) - self.assertEqual(sys._base_executable, "executable") - self.assertEqual(sys.base_prefix, "prefix") - self.assertEqual(sys.base_exec_prefix, "exec_prefix") - - def test_path(self): - self.set_config(module_search_paths_set=1, - module_search_paths=['a', 'b', 'c']) - self.assertEqual(sys.path, ['a', 'b', 'c']) - - # sys.path is reset if module_search_paths_set=0 - self.set_config(module_search_paths_set=0, - module_search_paths=['new_path']) - self.assertNotEqual(sys.path, ['a', 'b', 'c']) - self.assertNotEqual(sys.path, ['new_path']) - - def test_argv(self): - self.set_config(parse_argv=0, - argv=['python_program', 'args'], - orig_argv=['orig', 'orig_args']) - self.assertEqual(sys.argv, ['python_program', 'args']) - self.assertEqual(sys.orig_argv, ['orig', 'orig_args']) - - self.set_config(parse_argv=0, - argv=[], - orig_argv=[]) - self.assertEqual(sys.argv, ['']) - self.assertEqual(sys.orig_argv, []) - - def test_pycache_prefix(self): - self.check(pycache_prefix=None) - self.check(pycache_prefix="pycache_prefix") - - -if __name__ == "__main__": - unittest.main() diff --git a/Lib/test/_test_multiprocessing.py b/Lib/test/_test_multiprocessing.py index 38a03f3391d31d..4b7c3e7fa8bdd7 100644 --- a/Lib/test/_test_multiprocessing.py +++ b/Lib/test/_test_multiprocessing.py @@ -319,7 +319,7 @@ def test_current(self): authkey = current.authkey self.assertTrue(current.is_alive()) - self.assertTrue(not current.daemon) + self.assertFalse(current.daemon) self.assertIsInstance(authkey, bytes) self.assertTrue(len(authkey) > 0) self.assertEqual(current.ident, os.getpid()) @@ -463,7 +463,7 @@ def test_process(self): self.assertEqual(p.is_alive(), False) self.assertEqual(p.daemon, True) self.assertNotIn(p, self.active_children()) - self.assertTrue(type(self.active_children()) is list) + self.assertIs(type(self.active_children()), list) self.assertEqual(p.exitcode, None) p.start() @@ -583,8 +583,8 @@ def test_cpu_count(self): cpus = multiprocessing.cpu_count() except NotImplementedError: cpus = 1 - self.assertTrue(type(cpus) is int) - self.assertTrue(cpus >= 1) + self.assertIsInstance(cpus, int) + self.assertGreaterEqual(cpus, 1) def test_active_children(self): self.assertEqual(type(self.active_children()), list) @@ -2382,14 +2382,14 @@ def test_getobj_getlock(self): self.assertEqual(lock, lock3) arr4 = self.Value('i', 5, lock=False) - self.assertFalse(hasattr(arr4, 'get_lock')) - self.assertFalse(hasattr(arr4, 'get_obj')) + self.assertNotHasAttr(arr4, 'get_lock') + self.assertNotHasAttr(arr4, 'get_obj') self.assertRaises(AttributeError, self.Value, 'i', 5, lock='navalue') arr5 = self.RawValue('i', 5) - 
self.assertFalse(hasattr(arr5, 'get_lock')) - self.assertFalse(hasattr(arr5, 'get_obj')) + self.assertNotHasAttr(arr5, 'get_lock') + self.assertNotHasAttr(arr5, 'get_obj') class _TestArray(BaseTestCase): @@ -2462,14 +2462,14 @@ def test_getobj_getlock_obj(self): self.assertEqual(lock, lock3) arr4 = self.Array('i', range(10), lock=False) - self.assertFalse(hasattr(arr4, 'get_lock')) - self.assertFalse(hasattr(arr4, 'get_obj')) + self.assertNotHasAttr(arr4, 'get_lock') + self.assertNotHasAttr(arr4, 'get_obj') self.assertRaises(AttributeError, self.Array, 'i', range(10), lock='notalock') arr5 = self.RawArray('i', range(10)) - self.assertFalse(hasattr(arr5, 'get_lock')) - self.assertFalse(hasattr(arr5, 'get_obj')) + self.assertNotHasAttr(arr5, 'get_lock') + self.assertNotHasAttr(arr5, 'get_obj') # # @@ -2657,8 +2657,8 @@ def test_namespace(self): self.assertEqual((n.name, n.job), ('Bob', 'Builder')) del n.job self.assertEqual(str(n), "Namespace(name='Bob')") - self.assertTrue(hasattr(n, 'name')) - self.assertTrue(not hasattr(n, 'job')) + self.assertHasAttr(n, 'name') + self.assertNotHasAttr(n, 'job') # # @@ -4938,13 +4938,9 @@ def test_import(self): for name in modules: __import__(name) mod = sys.modules[name] - self.assertTrue(hasattr(mod, '__all__'), name) - + self.assertHasAttr(mod, '__all__', name) for attr in mod.__all__: - self.assertTrue( - hasattr(mod, attr), - '%r does not have attribute %r' % (mod, attr) - ) + self.assertHasAttr(mod, attr) # # Quick test that logging works -- does not test logging output @@ -4957,7 +4953,7 @@ class _TestLogging(BaseTestCase): def test_enable_logging(self): logger = multiprocessing.get_logger() logger.setLevel(util.SUBWARNING) - self.assertTrue(logger is not None) + self.assertIsNotNone(logger) logger.debug('this will not be printed') logger.info('nor will this') logger.setLevel(LOG_LEVEL) @@ -5753,9 +5749,8 @@ def test_set_get(self): self.assertEqual(multiprocessing.get_start_method(), method) ctx = multiprocessing.get_context() self.assertEqual(ctx.get_start_method(), method) - self.assertTrue(type(ctx).__name__.lower().startswith(method)) - self.assertTrue( - ctx.Process.__name__.lower().startswith(method)) + self.assertStartsWith(type(ctx).__name__.lower(), method) + self.assertStartsWith(ctx.Process.__name__.lower(), method) self.check_context(multiprocessing) count += 1 finally: @@ -5956,9 +5951,9 @@ def check_resource_tracker_death(self, signum, should_die): if should_die: self.assertEqual(len(all_warn), 1) the_warn = all_warn[0] - self.assertTrue(issubclass(the_warn.category, UserWarning)) - self.assertTrue("resource_tracker: process died" - in str(the_warn.message)) + self.assertIsSubclass(the_warn.category, UserWarning) + self.assertIn("resource_tracker: process died", + str(the_warn.message)) else: self.assertEqual(len(all_warn), 0) @@ -6163,8 +6158,8 @@ def is_alive(self): Process=FailingForkProcess)) p.close() p.join() - self.assertFalse( - any(process.is_alive() for process in forked_processes)) + for process in forked_processes: + self.assertFalse(process.is_alive(), process) @hashlib_helper.requires_hashdigest('sha256') diff --git a/Lib/test/audit-tests.py b/Lib/test/audit-tests.py index 6df09d891433ea..6b9b21cf7f6a3c 100644 --- a/Lib/test/audit-tests.py +++ b/Lib/test/audit-tests.py @@ -187,7 +187,7 @@ class C(A): def test_open(testfn): - # SSLContext.load_dh_params uses _Py_fopen_obj rather than normal open() + # SSLContext.load_dh_params uses Py_fopen() rather than normal open() try: import ssl diff --git a/Lib/test/clinic.test.c 
b/Lib/test/clinic.test.c index e4f146c0841188..0dfcc281985100 100644 --- a/Lib/test/clinic.test.c +++ b/Lib/test/clinic.test.c @@ -4758,7 +4758,7 @@ static PyObject * Test_cls_with_param_impl(TestObj *self, PyTypeObject *cls, int a); static PyObject * -Test_cls_with_param(TestObj *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +Test_cls_with_param(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -4798,7 +4798,7 @@ Test_cls_with_param(TestObj *self, PyTypeObject *cls, PyObject *const *args, Py_ if (a == -1 && PyErr_Occurred()) { goto exit; } - return_value = Test_cls_with_param_impl(self, cls, a); + return_value = Test_cls_with_param_impl((TestObj *)self, cls, a); exit: return return_value; @@ -4806,7 +4806,7 @@ Test_cls_with_param(TestObj *self, PyTypeObject *cls, PyObject *const *args, Py_ static PyObject * Test_cls_with_param_impl(TestObj *self, PyTypeObject *cls, int a) -/*[clinic end generated code: output=83a391eea66d08f8 input=af158077bd237ef9]*/ +/*[clinic end generated code: output=7e893134a81fef92 input=af158077bd237ef9]*/ /*[clinic input] @@ -4908,18 +4908,18 @@ static PyObject * Test_cls_no_params_impl(TestObj *self, PyTypeObject *cls); static PyObject * -Test_cls_no_params(TestObj *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +Test_cls_no_params(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "cls_no_params() takes no arguments"); return NULL; } - return Test_cls_no_params_impl(self, cls); + return Test_cls_no_params_impl((TestObj *)self, cls); } static PyObject * Test_cls_no_params_impl(TestObj *self, PyTypeObject *cls) -/*[clinic end generated code: output=4d68b4652c144af3 input=e7e2e4e344e96a11]*/ +/*[clinic end generated code: output=8845de054449f40a input=e7e2e4e344e96a11]*/ /*[clinic input] @@ -4945,7 +4945,7 @@ Test_metho_not_default_return_converter(TestObj *self, PyObject *a) PyObject *return_value = NULL; int _return_value; - _return_value = Test_metho_not_default_return_converter_impl(self, a); + _return_value = Test_metho_not_default_return_converter_impl((TestObj *)self, a); if ((_return_value == -1) && PyErr_Occurred()) { goto exit; } @@ -4957,7 +4957,7 @@ Test_metho_not_default_return_converter(TestObj *self, PyObject *a) static int Test_metho_not_default_return_converter_impl(TestObj *self, PyObject *a) -/*[clinic end generated code: output=3350de11bd538007 input=428657129b521177]*/ +/*[clinic end generated code: output=b2cce75a7af2e6ce input=428657129b521177]*/ /*[clinic input] @@ -4983,7 +4983,7 @@ static PyObject * Test_an_metho_arg_named_arg_impl(TestObj *self, int arg); static PyObject * -Test_an_metho_arg_named_arg(TestObj *self, PyObject *arg_) +Test_an_metho_arg_named_arg(PyObject *self, PyObject *arg_) { PyObject *return_value = NULL; int arg; @@ -4992,7 +4992,7 @@ Test_an_metho_arg_named_arg(TestObj *self, PyObject *arg_) if (arg == -1 && PyErr_Occurred()) { goto exit; } - return_value = Test_an_metho_arg_named_arg_impl(self, arg); + return_value = Test_an_metho_arg_named_arg_impl((TestObj *)self, arg); exit: return return_value; @@ -5000,7 +5000,7 @@ Test_an_metho_arg_named_arg(TestObj *self, PyObject *arg_) static PyObject * Test_an_metho_arg_named_arg_impl(TestObj *self, int arg) -/*[clinic end 
generated code: output=9f04de4a62287e28 input=2a53a57cf5624f95]*/ +/*[clinic end generated code: output=38554f09950d07e7 input=2a53a57cf5624f95]*/ /*[clinic input] @@ -5289,14 +5289,14 @@ static PyObject * Test_meth_coexist_impl(TestObj *self); static PyObject * -Test_meth_coexist(TestObj *self, PyObject *Py_UNUSED(ignored)) +Test_meth_coexist(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return Test_meth_coexist_impl(self); + return Test_meth_coexist_impl((TestObj *)self); } static PyObject * Test_meth_coexist_impl(TestObj *self) -/*[clinic end generated code: output=808a293d0cd27439 input=2a1d75b5e6fec6dd]*/ +/*[clinic end generated code: output=7edf4e95b29f06fa input=2a1d75b5e6fec6dd]*/ /*[clinic input] @getter @@ -5317,14 +5317,14 @@ static PyObject * Test_property_get_impl(TestObj *self); static PyObject * -Test_property_get(TestObj *self, void *Py_UNUSED(context)) +Test_property_get(PyObject *self, void *Py_UNUSED(context)) { - return Test_property_get_impl(self); + return Test_property_get_impl((TestObj *)self); } static PyObject * Test_property_get_impl(TestObj *self) -/*[clinic end generated code: output=7cadd0f539805266 input=2d92b3449fbc7d2b]*/ +/*[clinic end generated code: output=b38d68abd3466a6e input=2d92b3449fbc7d2b]*/ /*[clinic input] @setter @@ -5345,18 +5345,18 @@ static int Test_property_set_impl(TestObj *self, PyObject *value); static int -Test_property_set(TestObj *self, PyObject *value, void *Py_UNUSED(context)) +Test_property_set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) { int return_value; - return_value = Test_property_set_impl(self, value); + return_value = Test_property_set_impl((TestObj *)self, value); return return_value; } static int Test_property_set_impl(TestObj *self, PyObject *value) -/*[clinic end generated code: output=e4342fe9bb1d7817 input=3bc3f46a23c83a88]*/ +/*[clinic end generated code: output=49f925ab2a33b637 input=3bc3f46a23c83a88]*/ /*[clinic input] @setter @@ -5377,18 +5377,18 @@ static int Test_setter_first_with_docstr_set_impl(TestObj *self, PyObject *value); static int -Test_setter_first_with_docstr_set(TestObj *self, PyObject *value, void *Py_UNUSED(context)) +Test_setter_first_with_docstr_set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) { int return_value; - return_value = Test_setter_first_with_docstr_set_impl(self, value); + return_value = Test_setter_first_with_docstr_set_impl((TestObj *)self, value); return return_value; } static int Test_setter_first_with_docstr_set_impl(TestObj *self, PyObject *value) -/*[clinic end generated code: output=e4d76b558a4061db input=31a045ce11bbe961]*/ +/*[clinic end generated code: output=5aaf44373c0af545 input=31a045ce11bbe961]*/ /*[clinic input] @getter @@ -5418,14 +5418,14 @@ static PyObject * Test_setter_first_with_docstr_get_impl(TestObj *self); static PyObject * -Test_setter_first_with_docstr_get(TestObj *self, void *Py_UNUSED(context)) +Test_setter_first_with_docstr_get(PyObject *self, void *Py_UNUSED(context)) { - return Test_setter_first_with_docstr_get_impl(self); + return Test_setter_first_with_docstr_get_impl((TestObj *)self); } static PyObject * Test_setter_first_with_docstr_get_impl(TestObj *self) -/*[clinic end generated code: output=749a30266f9fb443 input=10af4e43b3cb34dc]*/ +/*[clinic end generated code: output=fe6e3aa844a24920 input=10af4e43b3cb34dc]*/ /*[clinic input] output push @@ -5708,7 +5708,7 @@ Test__pyarg_parsestackandkeywords_impl(TestObj *self, PyTypeObject *cls, Py_ssize_t key_length); static PyObject * 
-Test__pyarg_parsestackandkeywords(TestObj *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +Test__pyarg_parsestackandkeywords(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -5731,7 +5731,7 @@ Test__pyarg_parsestackandkeywords(TestObj *self, PyTypeObject *cls, PyObject *co &key, &key_length)) { goto exit; } - return_value = Test__pyarg_parsestackandkeywords_impl(self, cls, key, key_length); + return_value = Test__pyarg_parsestackandkeywords_impl((TestObj *)self, cls, key, key_length); exit: return return_value; @@ -5741,7 +5741,7 @@ static PyObject * Test__pyarg_parsestackandkeywords_impl(TestObj *self, PyTypeObject *cls, const char *key, Py_ssize_t key_length) -/*[clinic end generated code: output=4fda8a7f2547137c input=fc72ef4b4cfafabc]*/ +/*[clinic end generated code: output=7060c213d7b8200e input=fc72ef4b4cfafabc]*/ /*[clinic input] diff --git a/Lib/test/libregrtest/pgo.py b/Lib/test/libregrtest/pgo.py index f762345c88cde3..04803ddf64453c 100644 --- a/Lib/test/libregrtest/pgo.py +++ b/Lib/test/libregrtest/pgo.py @@ -19,7 +19,6 @@ 'test_datetime', 'test_decimal', 'test_difflib', - 'test_embed', 'test_float', 'test_fstring', 'test_functools', diff --git a/Lib/test/libregrtest/single.py b/Lib/test/libregrtest/single.py index 0e174f82abed28..54df688bbc470e 100644 --- a/Lib/test/libregrtest/single.py +++ b/Lib/test/libregrtest/single.py @@ -162,8 +162,8 @@ def test_func(): def _runtest_env_changed_exc(result: TestResult, runtests: RunTests, display_failure: bool = True) -> None: # Handle exceptions, detect environment changes. - ansi = get_colors() - red, reset, yellow = ansi.RED, ansi.RESET, ansi.YELLOW + stdout = get_colors(file=sys.stdout) + stderr = get_colors(file=sys.stderr) # Reset the environment_altered flag to detect if a test altered # the environment @@ -184,18 +184,24 @@ def _runtest_env_changed_exc(result: TestResult, runtests: RunTests, _load_run_test(result, runtests) except support.ResourceDenied as exc: if not quiet and not pgo: - print(f"{yellow}{test_name} skipped -- {exc}{reset}", flush=True) + print( + f"{stdout.YELLOW}{test_name} skipped -- {exc}{stdout.RESET}", + flush=True, + ) result.state = State.RESOURCE_DENIED return except unittest.SkipTest as exc: if not quiet and not pgo: - print(f"{yellow}{test_name} skipped -- {exc}{reset}", flush=True) + print( + f"{stdout.YELLOW}{test_name} skipped -- {exc}{stdout.RESET}", + flush=True, + ) result.state = State.SKIPPED return except support.TestFailedWithDetails as exc: - msg = f"{red}test {test_name} failed{reset}" + msg = f"{stderr.RED}test {test_name} failed{stderr.RESET}" if display_failure: - msg = f"{red}{msg} -- {exc}{reset}" + msg = f"{stderr.RED}{msg} -- {exc}{stderr.RESET}" print(msg, file=sys.stderr, flush=True) result.state = State.FAILED result.errors = exc.errors @@ -203,9 +209,9 @@ def _runtest_env_changed_exc(result: TestResult, runtests: RunTests, result.stats = exc.stats return except support.TestFailed as exc: - msg = f"{red}test {test_name} failed{reset}" + msg = f"{stderr.RED}test {test_name} failed{stderr.RESET}" if display_failure: - msg = f"{red}{msg} -- {exc}{reset}" + msg = f"{stderr.RED}{msg} -- {exc}{stderr.RESET}" print(msg, file=sys.stderr, flush=True) result.state = State.FAILED result.stats = exc.stats @@ -220,8 +226,11 @@ def _runtest_env_changed_exc(result: TestResult, runtests: RunTests, except: if not pgo: 
msg = traceback.format_exc() - print(f"{red}test {test_name} crashed -- {msg}{reset}", - file=sys.stderr, flush=True) + print( + f"{stderr.RED}test {test_name} crashed -- {msg}{stderr.RESET}", + file=sys.stderr, + flush=True, + ) result.state = State.UNCAUGHT_EXC return @@ -303,7 +312,7 @@ def run_single_test(test_name: TestName, runtests: RunTests) -> TestResult: If runtests.use_junit, xml_data is a list containing each generated testsuite element. """ - ansi = get_colors() + ansi = get_colors(file=sys.stderr) red, reset, yellow = ansi.BOLD_RED, ansi.RESET, ansi.YELLOW start_time = time.perf_counter() diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py index 42e7b876594fa7..230bb240c89f77 100644 --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -58,11 +58,15 @@ "LOOPBACK_TIMEOUT", "INTERNET_TIMEOUT", "SHORT_TIMEOUT", "LONG_TIMEOUT", "Py_DEBUG", "exceeds_recursion_limit", "get_c_recursion_limit", "skip_on_s390x", - "without_optimizer", + "requires_jit_enabled", + "requires_jit_disabled", "force_not_colorized", + "force_not_colorized_test_class", + "make_clean_env", "BrokenIter", "in_systemd_nspawn_sync_suppressed", "run_no_yield_async_fn", "run_yielding_async_fn", "async_yield", + "reset_code", ] @@ -504,10 +508,10 @@ def requires_lzma(reason='requires lzma'): def has_no_debug_ranges(): try: - import _testinternalcapi + import _testcapi except ImportError: raise unittest.SkipTest("_testinternalcapi required") - config = _testinternalcapi.get_config() + return not _testcapi.config_get('code_debug_ranges') return not bool(config['code_debug_ranges']) def requires_debug_ranges(reason='requires co_positions / debug_ranges'): @@ -1283,6 +1287,12 @@ def requires_specialization_ft(test): _opcode.ENABLE_SPECIALIZATION_FT, "requires specialization")(test) +def reset_code(f: types.FunctionType) -> types.FunctionType: + """Clear all specializations, local instrumentation, and JIT code for the given function.""" + f.__code__ = f.__code__.replace() + return f + + #======================================================================= # Check for the presence of docstrings. 
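The `reset_code()` helper added to `test.support` above swaps a function's code object for a fresh copy via `CodeType.replace()`, so specializations, local instrumentation, and JIT code attached to the old object are discarded while the function's behaviour is unchanged. A minimal usage sketch, assuming a throwaway `add()` function and a warm-up loop that may trigger specialization (neither is part of the patch):

    from test import support

    def add(a, b):
        return a + b

    for _ in range(1000):
        add(1, 2)            # warm up: the adaptive interpreter may specialize BINARY_OP

    support.reset_code(add)  # replace the code object; any cached specializations are dropped
    assert add(1, 2) == 3    # behaviour is unchanged, only interpreter-internal state is reset
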
@@ -2618,21 +2628,13 @@ def exceeds_recursion_limit(): Py_TRACE_REFS = hasattr(sys, 'getobjects') -# Decorator to disable optimizer while a function run -def without_optimizer(func): - try: - from _testinternalcapi import get_optimizer, set_optimizer - except ImportError: - return func - @functools.wraps(func) - def wrapper(*args, **kwargs): - save_opt = get_optimizer() - try: - set_optimizer(None) - return func(*args, **kwargs) - finally: - set_optimizer(save_opt) - return wrapper +try: + from _testinternalcapi import jit_enabled +except ImportError: + requires_jit_enabled = requires_jit_disabled = unittest.skip("requires _testinternalcapi") +else: + requires_jit_enabled = unittest.skipUnless(jit_enabled(), "requires JIT enabled") + requires_jit_disabled = unittest.skipIf(jit_enabled(), "requires JIT disabled") _BASE_COPY_SRC_DIR_IGNORED_NAMES = frozenset({ @@ -2832,30 +2834,54 @@ def is_slot_wrapper(name, value): yield name, True +@contextlib.contextmanager +def no_color(): + import _colorize + from .os_helper import EnvironmentVarGuard + + with ( + swap_attr(_colorize, "can_colorize", lambda file=None: False), + EnvironmentVarGuard() as env, + ): + for var in {"FORCE_COLOR", "NO_COLOR", "PYTHON_COLORS"}: + env.unset(var) + env.set("NO_COLOR", "1") + yield + + def force_not_colorized(func): """Force the terminal not to be colorized.""" @functools.wraps(func) def wrapper(*args, **kwargs): - import _colorize - original_fn = _colorize.can_colorize - variables: dict[str, str | None] = { - "PYTHON_COLORS": None, "FORCE_COLOR": None, "NO_COLOR": None - } - try: - for key in variables: - variables[key] = os.environ.pop(key, None) - os.environ["NO_COLOR"] = "1" - _colorize.can_colorize = lambda: False + with no_color(): return func(*args, **kwargs) - finally: - _colorize.can_colorize = original_fn - del os.environ["NO_COLOR"] - for key, value in variables.items(): - if value is not None: - os.environ[key] = value return wrapper +def force_not_colorized_test_class(cls): + """Force the terminal not to be colorized for the entire test class.""" + original_setUpClass = cls.setUpClass + + @classmethod + @functools.wraps(cls.setUpClass) + def new_setUpClass(cls): + cls.enterClassContext(no_color()) + original_setUpClass() + + cls.setUpClass = new_setUpClass + return cls + + +def make_clean_env() -> dict[str, str]: + clean_env = os.environ.copy() + for k in clean_env.copy(): + if k.startswith("PYTHON"): + clean_env.pop(k) + clean_env.pop("FORCE_COLOR", None) + clean_env.pop("NO_COLOR", None) + return clean_env + + def initialized_with_pyrepl(): """Detect whether PyREPL was used during Python initialization.""" # If the main module has a __file__ attribute it's a Python module, which means PyREPL. diff --git a/Lib/test/support/os_helper.py b/Lib/test/support/os_helper.py index 8071c248b9b67e..15dcdc9b1fddfb 100644 --- a/Lib/test/support/os_helper.py +++ b/Lib/test/support/os_helper.py @@ -294,6 +294,33 @@ def skip_unless_working_chmod(test): return test if ok else unittest.skip(msg)(test) +@contextlib.contextmanager +def save_mode(path, *, quiet=False): + """Context manager that restores the mode (permissions) of *path* on exit. + + Arguments: + + path: Path of the file to restore the mode of. + + quiet: if False (the default), the context manager raises an exception + on error. Otherwise, it issues only a warning and keeps the current + working directory the same. 
+ + """ + saved_mode = os.stat(path) + try: + yield + finally: + try: + os.chmod(path, saved_mode.st_mode) + except OSError as exc: + if not quiet: + raise + warnings.warn(f'tests may fail, unable to restore the mode of ' + f'{path!r} to {saved_mode.st_mode}: {exc}', + RuntimeWarning, stacklevel=3) + + # Check whether the current effective user has the capability to override # DAC (discretionary access control). Typically user root is able to # bypass file read, write, and execute permission checks. The capability diff --git a/Lib/test/support/venv.py b/Lib/test/support/venv.py index 78e6a51ec1815e..7bfb9e4f3c479f 100644 --- a/Lib/test/support/venv.py +++ b/Lib/test/support/venv.py @@ -6,6 +6,7 @@ import sys import sysconfig import tempfile +import unittest import venv @@ -68,3 +69,14 @@ def run(self, *args, **subprocess_args): raise else: return result + + +class VirtualEnvironmentMixin: + def venv(self, name=None, **venv_create_args): + venv_name = self.id() + if name: + venv_name += f'-{name}' + return VirtualEnvironment.from_tmpdir( + prefix=f'{venv_name}-venv-', + **venv_create_args, + ) diff --git a/Lib/test/test__colorize.py b/Lib/test/test__colorize.py index 1871775fa205a2..056a5306ced183 100644 --- a/Lib/test/test__colorize.py +++ b/Lib/test/test__colorize.py @@ -1,68 +1,134 @@ import contextlib +import io import sys import unittest import unittest.mock import _colorize -from test.support import force_not_colorized +from test.support.os_helper import EnvironmentVarGuard -ORIGINAL_CAN_COLORIZE = _colorize.can_colorize +@contextlib.contextmanager +def clear_env(): + with EnvironmentVarGuard() as mock_env: + for var in "FORCE_COLOR", "NO_COLOR", "PYTHON_COLORS": + mock_env.unset(var) + yield mock_env -def setUpModule(): - _colorize.can_colorize = lambda: False - -def tearDownModule(): - _colorize.can_colorize = ORIGINAL_CAN_COLORIZE +def supports_virtual_terminal(): + if sys.platform == "win32": + return unittest.mock.patch("nt._supports_virtual_terminal", return_value=True) + else: + return contextlib.nullcontext() class TestColorizeFunction(unittest.TestCase): - @force_not_colorized def test_colorized_detection_checks_for_environment_variables(self): - flags = unittest.mock.MagicMock(ignore_environment=False) + def check(env, fallback, expected): + with (self.subTest(env=env, fallback=fallback), + clear_env() as mock_env): + mock_env.update(env) + isatty_mock.return_value = fallback + stdout_mock.isatty.return_value = fallback + self.assertEqual(_colorize.can_colorize(), expected) + with (unittest.mock.patch("os.isatty") as isatty_mock, + unittest.mock.patch("sys.stdout") as stdout_mock, + supports_virtual_terminal()): + stdout_mock.fileno.return_value = 1 + + for fallback in False, True: + check({}, fallback, fallback) + check({'TERM': 'dumb'}, fallback, False) + check({'TERM': 'xterm'}, fallback, fallback) + check({'TERM': ''}, fallback, fallback) + check({'FORCE_COLOR': '1'}, fallback, True) + check({'FORCE_COLOR': '0'}, fallback, True) + check({'FORCE_COLOR': ''}, fallback, fallback) + check({'NO_COLOR': '1'}, fallback, False) + check({'NO_COLOR': '0'}, fallback, False) + check({'NO_COLOR': ''}, fallback, fallback) + + check({'TERM': 'dumb', 'FORCE_COLOR': '1'}, False, True) + check({'FORCE_COLOR': '1', 'NO_COLOR': '1'}, True, False) + + for ignore_environment in False, True: + # Simulate running with or without `-E`. 
+ flags = unittest.mock.MagicMock(ignore_environment=ignore_environment) + with unittest.mock.patch("sys.flags", flags): + check({'PYTHON_COLORS': '1'}, True, True) + check({'PYTHON_COLORS': '1'}, False, not ignore_environment) + check({'PYTHON_COLORS': '0'}, True, ignore_environment) + check({'PYTHON_COLORS': '0'}, False, False) + for fallback in False, True: + check({'PYTHON_COLORS': 'x'}, fallback, fallback) + check({'PYTHON_COLORS': ''}, fallback, fallback) + + check({'TERM': 'dumb', 'PYTHON_COLORS': '1'}, False, not ignore_environment) + check({'NO_COLOR': '1', 'PYTHON_COLORS': '1'}, False, not ignore_environment) + check({'FORCE_COLOR': '1', 'PYTHON_COLORS': '0'}, True, ignore_environment) + + @unittest.skipUnless(sys.platform == "win32", "requires Windows") + def test_colorized_detection_checks_on_windows(self): + with (clear_env(), + unittest.mock.patch("os.isatty") as isatty_mock, + unittest.mock.patch("sys.stdout") as stdout_mock, + supports_virtual_terminal() as vt_mock): + stdout_mock.fileno.return_value = 1 + isatty_mock.return_value = True + stdout_mock.isatty.return_value = True + + vt_mock.return_value = True + self.assertEqual(_colorize.can_colorize(), True) + vt_mock.return_value = False + self.assertEqual(_colorize.can_colorize(), False) + import nt + del nt._supports_virtual_terminal + self.assertEqual(_colorize.can_colorize(), False) + + def test_colorized_detection_checks_for_std_streams(self): + with (clear_env(), + unittest.mock.patch("os.isatty") as isatty_mock, + unittest.mock.patch("sys.stdout") as stdout_mock, unittest.mock.patch("sys.stderr") as stderr_mock, - unittest.mock.patch("sys.flags", flags), - unittest.mock.patch("_colorize.can_colorize", ORIGINAL_CAN_COLORIZE), - (unittest.mock.patch("nt._supports_virtual_terminal", return_value=False) - if sys.platform == "win32" else - contextlib.nullcontext()) as vt_mock): + supports_virtual_terminal()): + stdout_mock.fileno.return_value = 1 + stderr_mock.fileno.side_effect = ZeroDivisionError + stderr_mock.isatty.side_effect = ZeroDivisionError isatty_mock.return_value = True - stderr_mock.fileno.return_value = 2 - stderr_mock.isatty.return_value = True - with unittest.mock.patch("os.environ", {'TERM': 'dumb'}): - self.assertEqual(_colorize.can_colorize(), False) - with unittest.mock.patch("os.environ", {'PYTHON_COLORS': '1'}): - self.assertEqual(_colorize.can_colorize(), True) - with unittest.mock.patch("os.environ", {'PYTHON_COLORS': '0'}): - self.assertEqual(_colorize.can_colorize(), False) - with unittest.mock.patch("os.environ", {'NO_COLOR': '1'}): - self.assertEqual(_colorize.can_colorize(), False) - with unittest.mock.patch("os.environ", - {'NO_COLOR': '1', "PYTHON_COLORS": '1'}): - self.assertEqual(_colorize.can_colorize(), True) - with unittest.mock.patch("os.environ", {'FORCE_COLOR': '1'}): - self.assertEqual(_colorize.can_colorize(), True) - with unittest.mock.patch("os.environ", - {'FORCE_COLOR': '1', 'NO_COLOR': '1'}): - self.assertEqual(_colorize.can_colorize(), False) - with unittest.mock.patch("os.environ", - {'FORCE_COLOR': '1', "PYTHON_COLORS": '0'}): - self.assertEqual(_colorize.can_colorize(), False) - - with unittest.mock.patch("os.environ", {}): - if sys.platform == "win32": - self.assertEqual(_colorize.can_colorize(), False) - - vt_mock.return_value = True - self.assertEqual(_colorize.can_colorize(), True) - else: - self.assertEqual(_colorize.can_colorize(), True) + stdout_mock.isatty.return_value = True + self.assertEqual(_colorize.can_colorize(), True) + + isatty_mock.return_value = False 
+ stdout_mock.isatty.return_value = False + self.assertEqual(_colorize.can_colorize(), False) + def test_colorized_detection_checks_for_file(self): + with clear_env(), supports_virtual_terminal(): + + with unittest.mock.patch("os.isatty") as isatty_mock: + file = unittest.mock.MagicMock() + file.fileno.return_value = 1 + isatty_mock.return_value = True + self.assertEqual(_colorize.can_colorize(file=file), True) isatty_mock.return_value = False - stderr_mock.isatty.return_value = False - self.assertEqual(_colorize.can_colorize(), False) + self.assertEqual(_colorize.can_colorize(file=file), False) + + # No file.fileno. + with unittest.mock.patch("os.isatty", side_effect=ZeroDivisionError): + file = unittest.mock.MagicMock(spec=['isatty']) + file.isatty.return_value = True + self.assertEqual(_colorize.can_colorize(file=file), False) + + # file.fileno() raises io.UnsupportedOperation. + with unittest.mock.patch("os.isatty", side_effect=ZeroDivisionError): + file = unittest.mock.MagicMock() + file.fileno.side_effect = io.UnsupportedOperation + file.isatty.return_value = True + self.assertEqual(_colorize.can_colorize(file=file), True) + file.isatty.return_value = False + self.assertEqual(_colorize.can_colorize(file=file), False) if __name__ == "__main__": diff --git a/Lib/test/test__interpreters.py b/Lib/test/test__interpreters.py index bf3165e2341949..fd444f1f06ce48 100644 --- a/Lib/test/test__interpreters.py +++ b/Lib/test/test__interpreters.py @@ -557,7 +557,7 @@ def setUp(self): self.id = _interpreters.create() def test_signatures(self): - # for method in ['exec', 'run_string', 'run_func']: + # See https://github.com/python/cpython/issues/126654 msg = "expected 'shared' to be a dict" with self.assertRaisesRegex(TypeError, msg): _interpreters.exec(self.id, 'a', 1) @@ -568,6 +568,17 @@ def test_signatures(self): with self.assertRaisesRegex(TypeError, msg): _interpreters.run_func(self.id, lambda: None, shared=1) + def test_invalid_shared_encoding(self): + # See https://github.com/python/cpython/issues/127196 + bad_shared = {"\uD82A": 0} + msg = 'surrogates not allowed' + with self.assertRaisesRegex(UnicodeEncodeError, msg): + _interpreters.exec(self.id, 'a', shared=bad_shared) + with self.assertRaisesRegex(UnicodeEncodeError, msg): + _interpreters.run_string(self.id, 'a', shared=bad_shared) + with self.assertRaisesRegex(UnicodeEncodeError, msg): + _interpreters.run_func(self.id, lambda: None, shared=bad_shared) + class RunStringTests(TestBase): diff --git a/Lib/test/test__opcode.py b/Lib/test/test__opcode.py index d5cf014d40daf8..4b11e83ae59a78 100644 --- a/Lib/test/test__opcode.py +++ b/Lib/test/test__opcode.py @@ -38,6 +38,13 @@ def test_is_valid(self): opcodes = [dis.opmap[opname] for opname in names] self.check_bool_function_result(_opcode.is_valid, opcodes, True) + def test_opmaps(self): + def check_roundtrip(name, map): + return self.assertEqual(opcode.opname[map[name]], name) + + check_roundtrip('BINARY_OP', opcode.opmap) + check_roundtrip('BINARY_OP_ADD_INT', opcode._specialized_opmap) + def test_oplists(self): def check_function(self, func, expected): for op in [-10, 520]: @@ -58,8 +65,7 @@ def check_function(self, func, expected): class StackEffectTests(unittest.TestCase): def test_stack_effect(self): self.assertEqual(stack_effect(dis.opmap['POP_TOP']), -1) - self.assertEqual(stack_effect(dis.opmap['BUILD_SLICE'], 0), -1) - self.assertEqual(stack_effect(dis.opmap['BUILD_SLICE'], 1), -1) + self.assertEqual(stack_effect(dis.opmap['BUILD_SLICE'], 2), -1) 
self.assertEqual(stack_effect(dis.opmap['BUILD_SLICE'], 3), -2) self.assertRaises(ValueError, stack_effect, 30000) # All defined opcodes diff --git a/Lib/test/test_abc.py b/Lib/test/test_abc.py index 5ce57cc209ea85..e90a8dc617c094 100644 --- a/Lib/test/test_abc.py +++ b/Lib/test/test_abc.py @@ -20,7 +20,7 @@ def test_abstractproperty_basics(self): def foo(self): pass self.assertTrue(foo.__isabstractmethod__) def bar(self): pass - self.assertFalse(hasattr(bar, "__isabstractmethod__")) + self.assertNotHasAttr(bar, "__isabstractmethod__") class C(metaclass=abc_ABCMeta): @abc.abstractproperty @@ -89,7 +89,7 @@ def test_abstractmethod_basics(self): def foo(self): pass self.assertTrue(foo.__isabstractmethod__) def bar(self): pass - self.assertFalse(hasattr(bar, "__isabstractmethod__")) + self.assertNotHasAttr(bar, "__isabstractmethod__") def test_abstractproperty_basics(self): @property @@ -276,21 +276,21 @@ class A(metaclass=abc_ABCMeta): class B(object): pass b = B() - self.assertFalse(issubclass(B, A)) - self.assertFalse(issubclass(B, (A,))) + self.assertNotIsSubclass(B, A) + self.assertNotIsSubclass(B, (A,)) self.assertNotIsInstance(b, A) self.assertNotIsInstance(b, (A,)) B1 = A.register(B) - self.assertTrue(issubclass(B, A)) - self.assertTrue(issubclass(B, (A,))) + self.assertIsSubclass(B, A) + self.assertIsSubclass(B, (A,)) self.assertIsInstance(b, A) self.assertIsInstance(b, (A,)) self.assertIs(B1, B) class C(B): pass c = C() - self.assertTrue(issubclass(C, A)) - self.assertTrue(issubclass(C, (A,))) + self.assertIsSubclass(C, A) + self.assertIsSubclass(C, (A,)) self.assertIsInstance(c, A) self.assertIsInstance(c, (A,)) @@ -301,16 +301,16 @@ class A(metaclass=abc_ABCMeta): class B(object): pass b = B() - self.assertTrue(issubclass(B, A)) - self.assertTrue(issubclass(B, (A,))) + self.assertIsSubclass(B, A) + self.assertIsSubclass(B, (A,)) self.assertIsInstance(b, A) self.assertIsInstance(b, (A,)) @A.register class C(B): pass c = C() - self.assertTrue(issubclass(C, A)) - self.assertTrue(issubclass(C, (A,))) + self.assertIsSubclass(C, A) + self.assertIsSubclass(C, (A,)) self.assertIsInstance(c, A) self.assertIsInstance(c, (A,)) self.assertIs(C, A.register(C)) @@ -321,14 +321,14 @@ class A(metaclass=abc_ABCMeta): class B: pass b = B() - self.assertFalse(isinstance(b, A)) - self.assertFalse(isinstance(b, (A,))) + self.assertNotIsInstance(b, A) + self.assertNotIsInstance(b, (A,)) token_old = abc_get_cache_token() A.register(B) token_new = abc_get_cache_token() self.assertGreater(token_new, token_old) - self.assertTrue(isinstance(b, A)) - self.assertTrue(isinstance(b, (A,))) + self.assertIsInstance(b, A) + self.assertIsInstance(b, (A,)) def test_registration_builtins(self): class A(metaclass=abc_ABCMeta): @@ -336,18 +336,18 @@ class A(metaclass=abc_ABCMeta): A.register(int) self.assertIsInstance(42, A) self.assertIsInstance(42, (A,)) - self.assertTrue(issubclass(int, A)) - self.assertTrue(issubclass(int, (A,))) + self.assertIsSubclass(int, A) + self.assertIsSubclass(int, (A,)) class B(A): pass B.register(str) class C(str): pass self.assertIsInstance("", A) self.assertIsInstance("", (A,)) - self.assertTrue(issubclass(str, A)) - self.assertTrue(issubclass(str, (A,))) - self.assertTrue(issubclass(C, A)) - self.assertTrue(issubclass(C, (A,))) + self.assertIsSubclass(str, A) + self.assertIsSubclass(str, (A,)) + self.assertIsSubclass(C, A) + self.assertIsSubclass(C, (A,)) def test_registration_edge_cases(self): class A(metaclass=abc_ABCMeta): @@ -375,39 +375,39 @@ class A(metaclass=abc_ABCMeta): def 
test_registration_transitiveness(self): class A(metaclass=abc_ABCMeta): pass - self.assertTrue(issubclass(A, A)) - self.assertTrue(issubclass(A, (A,))) + self.assertIsSubclass(A, A) + self.assertIsSubclass(A, (A,)) class B(metaclass=abc_ABCMeta): pass - self.assertFalse(issubclass(A, B)) - self.assertFalse(issubclass(A, (B,))) - self.assertFalse(issubclass(B, A)) - self.assertFalse(issubclass(B, (A,))) + self.assertNotIsSubclass(A, B) + self.assertNotIsSubclass(A, (B,)) + self.assertNotIsSubclass(B, A) + self.assertNotIsSubclass(B, (A,)) class C(metaclass=abc_ABCMeta): pass A.register(B) class B1(B): pass - self.assertTrue(issubclass(B1, A)) - self.assertTrue(issubclass(B1, (A,))) + self.assertIsSubclass(B1, A) + self.assertIsSubclass(B1, (A,)) class C1(C): pass B1.register(C1) - self.assertFalse(issubclass(C, B)) - self.assertFalse(issubclass(C, (B,))) - self.assertFalse(issubclass(C, B1)) - self.assertFalse(issubclass(C, (B1,))) - self.assertTrue(issubclass(C1, A)) - self.assertTrue(issubclass(C1, (A,))) - self.assertTrue(issubclass(C1, B)) - self.assertTrue(issubclass(C1, (B,))) - self.assertTrue(issubclass(C1, B1)) - self.assertTrue(issubclass(C1, (B1,))) + self.assertNotIsSubclass(C, B) + self.assertNotIsSubclass(C, (B,)) + self.assertNotIsSubclass(C, B1) + self.assertNotIsSubclass(C, (B1,)) + self.assertIsSubclass(C1, A) + self.assertIsSubclass(C1, (A,)) + self.assertIsSubclass(C1, B) + self.assertIsSubclass(C1, (B,)) + self.assertIsSubclass(C1, B1) + self.assertIsSubclass(C1, (B1,)) C1.register(int) class MyInt(int): pass - self.assertTrue(issubclass(MyInt, A)) - self.assertTrue(issubclass(MyInt, (A,))) + self.assertIsSubclass(MyInt, A) + self.assertIsSubclass(MyInt, (A,)) self.assertIsInstance(42, A) self.assertIsInstance(42, (A,)) @@ -467,16 +467,16 @@ def __subclasshook__(cls, C): if cls is A: return 'foo' in C.__dict__ return NotImplemented - self.assertFalse(issubclass(A, A)) - self.assertFalse(issubclass(A, (A,))) + self.assertNotIsSubclass(A, A) + self.assertNotIsSubclass(A, (A,)) class B: foo = 42 - self.assertTrue(issubclass(B, A)) - self.assertTrue(issubclass(B, (A,))) + self.assertIsSubclass(B, A) + self.assertIsSubclass(B, (A,)) class C: spam = 42 - self.assertFalse(issubclass(C, A)) - self.assertFalse(issubclass(C, (A,))) + self.assertNotIsSubclass(C, A) + self.assertNotIsSubclass(C, (A,)) def test_all_new_methods_are_called(self): class A(metaclass=abc_ABCMeta): @@ -493,7 +493,7 @@ class C(A, B): self.assertEqual(B.counter, 1) def test_ABC_has___slots__(self): - self.assertTrue(hasattr(abc.ABC, '__slots__')) + self.assertHasAttr(abc.ABC, '__slots__') def test_tricky_new_works(self): def with_metaclass(meta, *bases): @@ -515,7 +515,7 @@ def foo(self): del A.foo self.assertEqual(A.__abstractmethods__, {'foo'}) - self.assertFalse(hasattr(A, 'foo')) + self.assertNotHasAttr(A, 'foo') abc.update_abstractmethods(A) @@ -588,7 +588,7 @@ def updated_foo(self): A.foo = updated_foo abc.update_abstractmethods(A) A() - self.assertFalse(hasattr(A, '__abstractmethods__')) + self.assertNotHasAttr(A, '__abstractmethods__') def test_update_del_implementation(self): class A(metaclass=abc_ABCMeta): diff --git a/Lib/test/test_array.py b/Lib/test/test_array.py index f621f343eb062a..58ea89c4fac833 100755 --- a/Lib/test/test_array.py +++ b/Lib/test/test_array.py @@ -1665,5 +1665,13 @@ def test_tolist(self, size): self.assertEqual(ls[:8], list(example[:8])) self.assertEqual(ls[-8:], list(example[-8:])) + def test_gh_128961(self): + a = array.array('i') + it = iter(a) + list(it) + 
it.__setstate__(0) + self.assertRaises(StopIteration, next, it) + + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_asyncgen.py b/Lib/test/test_asyncgen.py index 5bfd789185c675..b81187871753b9 100644 --- a/Lib/test/test_asyncgen.py +++ b/Lib/test/test_asyncgen.py @@ -1152,6 +1152,23 @@ async def run(): self.loop.run_until_complete(run()) + def test_async_gen_asyncio_anext_tuple_no_exceptions(self): + # StopAsyncIteration exceptions should be cleared. + # See: https://github.com/python/cpython/issues/128078. + + async def foo(): + if False: + yield (1, 2) + + async def run(): + it = foo().__aiter__() + with self.assertRaises(StopAsyncIteration): + await it.__anext__() + res = await anext(it, ('a', 'b')) + self.assertTupleEqual(res, ('a', 'b')) + + self.loop.run_until_complete(run()) + def test_async_gen_asyncio_anext_stopiteration(self): async def foo(): try: diff --git a/Lib/test/test_asyncio/test_base_events.py b/Lib/test/test_asyncio/test_base_events.py index 1e063c1352ecb9..8cf1f6891faf97 100644 --- a/Lib/test/test_asyncio/test_base_events.py +++ b/Lib/test/test_asyncio/test_base_events.py @@ -833,8 +833,8 @@ async def test(): loop.close() def test_create_named_task_with_custom_factory(self): - def task_factory(loop, coro): - return asyncio.Task(coro, loop=loop) + def task_factory(loop, coro, **kwargs): + return asyncio.Task(coro, loop=loop, **kwargs) async def test(): pass @@ -1345,7 +1345,7 @@ def getaddrinfo_task(*args, **kwds): with self.assertRaises(OSError) as cm: self.loop.run_until_complete(coro) - self.assertTrue(str(cm.exception).startswith('Multiple exceptions: ')) + self.assertStartsWith(str(cm.exception), 'Multiple exceptions: ') self.assertTrue(m_socket.socket.return_value.close.called) coro = self.loop.create_connection( diff --git a/Lib/test/test_asyncio/test_eager_task_factory.py b/Lib/test/test_asyncio/test_eager_task_factory.py index dcf9ff716ad399..10450c11b68279 100644 --- a/Lib/test/test_asyncio/test_eager_task_factory.py +++ b/Lib/test/test_asyncio/test_eager_task_factory.py @@ -302,6 +302,18 @@ async def run(): self.run_coro(run()) + def test_name(self): + name = None + async def coro(): + nonlocal name + name = asyncio.current_task().get_name() + + async def main(): + task = self.loop.create_task(coro(), name="test name") + self.assertEqual(name, "test name") + await task + + self.run_coro(coro()) class AsyncTaskCounter: def __init__(self, loop, *, task_class, eager): diff --git a/Lib/test/test_asyncio/test_events.py b/Lib/test/test_asyncio/test_events.py index c8439c9af5e6ba..ada049e9c7d387 100644 --- a/Lib/test/test_asyncio/test_events.py +++ b/Lib/test/test_asyncio/test_events.py @@ -353,6 +353,124 @@ def run_in_thread(): t.join() self.assertEqual(results, ['hello', 'world']) + def test_call_soon_threadsafe_handle_block_check_cancelled(self): + results = [] + + callback_started = threading.Event() + callback_finished = threading.Event() + def callback(arg): + callback_started.set() + results.append(arg) + time.sleep(1) + callback_finished.set() + + def run_in_thread(): + handle = self.loop.call_soon_threadsafe(callback, 'hello') + self.assertIsInstance(handle, events._ThreadSafeHandle) + callback_started.wait() + # callback started so it should block checking for cancellation + # until it finishes + self.assertFalse(handle.cancelled()) + self.assertTrue(callback_finished.is_set()) + self.loop.call_soon_threadsafe(self.loop.stop) + + t = threading.Thread(target=run_in_thread) + t.start() + + self.loop.run_forever() + t.join() + 
self.assertEqual(results, ['hello']) + + def test_call_soon_threadsafe_handle_block_cancellation(self): + results = [] + + callback_started = threading.Event() + callback_finished = threading.Event() + def callback(arg): + callback_started.set() + results.append(arg) + time.sleep(1) + callback_finished.set() + + def run_in_thread(): + handle = self.loop.call_soon_threadsafe(callback, 'hello') + self.assertIsInstance(handle, events._ThreadSafeHandle) + callback_started.wait() + # callback started so it cannot be cancelled from other thread until + # it finishes + handle.cancel() + self.assertTrue(callback_finished.is_set()) + self.loop.call_soon_threadsafe(self.loop.stop) + + t = threading.Thread(target=run_in_thread) + t.start() + + self.loop.run_forever() + t.join() + self.assertEqual(results, ['hello']) + + def test_call_soon_threadsafe_handle_cancel_same_thread(self): + results = [] + callback_started = threading.Event() + callback_finished = threading.Event() + + fut = concurrent.futures.Future() + def callback(arg): + callback_started.set() + handle = fut.result() + handle.cancel() + results.append(arg) + callback_finished.set() + self.loop.stop() + + def run_in_thread(): + handle = self.loop.call_soon_threadsafe(callback, 'hello') + fut.set_result(handle) + self.assertIsInstance(handle, events._ThreadSafeHandle) + callback_started.wait() + # callback cancels itself from same thread so it has no effect + # it runs to completion + self.assertTrue(handle.cancelled()) + self.assertTrue(callback_finished.is_set()) + self.loop.call_soon_threadsafe(self.loop.stop) + + t = threading.Thread(target=run_in_thread) + t.start() + + self.loop.run_forever() + t.join() + self.assertEqual(results, ['hello']) + + def test_call_soon_threadsafe_handle_cancel_other_thread(self): + results = [] + ev = threading.Event() + + callback_finished = threading.Event() + def callback(arg): + results.append(arg) + callback_finished.set() + self.loop.stop() + + def run_in_thread(): + handle = self.loop.call_soon_threadsafe(callback, 'hello') + # handle can be cancelled from other thread if not started yet + self.assertIsInstance(handle, events._ThreadSafeHandle) + handle.cancel() + self.assertTrue(handle.cancelled()) + self.assertFalse(callback_finished.is_set()) + ev.set() + self.loop.call_soon_threadsafe(self.loop.stop) + + # block the main loop until the callback is added and cancelled in the + # other thread + self.loop.call_soon(ev.wait) + t = threading.Thread(target=run_in_thread) + t.start() + self.loop.run_forever() + t.join() + self.assertEqual(results, []) + self.assertFalse(callback_finished.is_set()) + def test_call_soon_threadsafe_same_thread(self): results = [] @@ -2066,7 +2184,7 @@ def test_subprocess_stderr(self): transp.close() self.assertEqual(b'OUT:test', proto.data[1]) - self.assertTrue(proto.data[2].startswith(b'ERR:test'), proto.data[2]) + self.assertStartsWith(proto.data[2], b'ERR:test') self.assertEqual(0, proto.returncode) @support.requires_subprocess() @@ -2088,8 +2206,7 @@ def test_subprocess_stderr_redirect_to_stdout(self): stdin.write(b'test') self.loop.run_until_complete(proto.completed) - self.assertTrue(proto.data[1].startswith(b'OUT:testERR:test'), - proto.data[1]) + self.assertStartsWith(proto.data[1], b'OUT:testERR:test') self.assertEqual(b'', proto.data[2]) transp.close() diff --git a/Lib/test/test_asyncio/test_free_threading.py b/Lib/test/test_asyncio/test_free_threading.py index 90bddbf3a9dda1..6da398e77e7797 100644 --- a/Lib/test/test_asyncio/test_free_threading.py +++ 
b/Lib/test/test_asyncio/test_free_threading.py @@ -1,4 +1,5 @@ import asyncio +import threading import unittest from threading import Thread from unittest import TestCase @@ -7,6 +8,11 @@ threading_helper.requires_working_threading(module=True) + +class MyException(Exception): + pass + + def tearDownModule(): asyncio._set_event_loop_policy(None) @@ -53,13 +59,98 @@ def runner(): with threading_helper.start_threads(threads): pass + def test_all_tasks_different_thread(self) -> None: + loop = None + started = threading.Event() + done = threading.Event() # used for main task not finishing early + async def coro(): + await asyncio.Future() + + lock = threading.Lock() + tasks = set() + + async def main(): + nonlocal tasks, loop + loop = asyncio.get_running_loop() + started.set() + for i in range(1000): + with lock: + asyncio.create_task(coro()) + tasks = self.all_tasks(loop) + done.wait() + + runner = threading.Thread(target=lambda: asyncio.run(main())) + + def check(): + started.wait() + with lock: + self.assertSetEqual(tasks & self.all_tasks(loop), tasks) + + threads = [threading.Thread(target=check) for _ in range(10)] + runner.start() + + with threading_helper.start_threads(threads): + pass + + done.set() + runner.join() + + def test_run_coroutine_threadsafe(self) -> None: + results = [] + + def in_thread(loop: asyncio.AbstractEventLoop): + coro = asyncio.sleep(0.1, result=42) + fut = asyncio.run_coroutine_threadsafe(coro, loop) + result = fut.result() + self.assertEqual(result, 42) + results.append(result) + + async def main(): + loop = asyncio.get_running_loop() + async with asyncio.TaskGroup() as tg: + for _ in range(10): + tg.create_task(asyncio.to_thread(in_thread, loop)) + self.assertEqual(results, [42] * 10) + + with asyncio.Runner() as r: + loop = r.get_loop() + loop.set_task_factory(self.factory) + r.run(main()) + + def test_run_coroutine_threadsafe_exception(self) -> None: + async def coro(): + await asyncio.sleep(0) + raise MyException("test") + + def in_thread(loop: asyncio.AbstractEventLoop): + fut = asyncio.run_coroutine_threadsafe(coro(), loop) + return fut.result() + + async def main(): + loop = asyncio.get_running_loop() + tasks = [] + for _ in range(10): + task = loop.create_task(asyncio.to_thread(in_thread, loop)) + tasks.append(task) + results = await asyncio.gather(*tasks, return_exceptions=True) + + self.assertEqual(len(results), 10) + for result in results: + self.assertIsInstance(result, MyException) + self.assertEqual(str(result), "test") + + with asyncio.Runner() as r: + loop = r.get_loop() + loop.set_task_factory(self.factory) + r.run(main()) + class TestPyFreeThreading(TestFreeThreading, TestCase): all_tasks = staticmethod(asyncio.tasks._py_all_tasks) current_task = staticmethod(asyncio.tasks._py_current_task) - def factory(self, loop, coro, context=None): - return asyncio.tasks._PyTask(coro, loop=loop, context=context) + def factory(self, loop, coro, **kwargs): + return asyncio.tasks._PyTask(coro, loop=loop, **kwargs) @unittest.skipUnless(hasattr(asyncio.tasks, "_c_all_tasks"), "requires _asyncio") @@ -67,16 +158,16 @@ class TestCFreeThreading(TestFreeThreading, TestCase): all_tasks = staticmethod(getattr(asyncio.tasks, "_c_all_tasks", None)) current_task = staticmethod(getattr(asyncio.tasks, "_c_current_task", None)) - def factory(self, loop, coro, context=None): - return asyncio.tasks._CTask(coro, loop=loop, context=context) + def factory(self, loop, coro, **kwargs): + return asyncio.tasks._CTask(coro, loop=loop, **kwargs) class 
TestEagerPyFreeThreading(TestPyFreeThreading): - def factory(self, loop, coro, context=None): - return asyncio.tasks._PyTask(coro, loop=loop, context=context, eager_start=True) + def factory(self, loop, coro, eager_start=True, **kwargs): + return asyncio.tasks._PyTask(coro, loop=loop, **kwargs, eager_start=eager_start) @unittest.skipUnless(hasattr(asyncio.tasks, "_c_all_tasks"), "requires _asyncio") class TestEagerCFreeThreading(TestCFreeThreading, TestCase): - def factory(self, loop, coro, context=None): - return asyncio.tasks._CTask(coro, loop=loop, context=context, eager_start=True) + def factory(self, loop, coro, eager_start=True, **kwargs): + return asyncio.tasks._CTask(coro, loop=loop, **kwargs, eager_start=eager_start) diff --git a/Lib/test/test_asyncio/test_futures.py b/Lib/test/test_asyncio/test_futures.py index 84b44011b9a844..01d6230e6dd9a3 100644 --- a/Lib/test/test_asyncio/test_futures.py +++ b/Lib/test/test_asyncio/test_futures.py @@ -242,7 +242,7 @@ def test_uninitialized(self): def test_future_cancel_message_getter(self): f = self._new_future(loop=self.loop) - self.assertTrue(hasattr(f, '_cancel_message')) + self.assertHasAttr(f, '_cancel_message') self.assertEqual(f._cancel_message, None) f.cancel('my message') diff --git a/Lib/test/test_asyncio/test_graph.py b/Lib/test/test_asyncio/test_graph.py new file mode 100644 index 00000000000000..fd2160d4ca3137 --- /dev/null +++ b/Lib/test/test_asyncio/test_graph.py @@ -0,0 +1,436 @@ +import asyncio +import io +import unittest + + +# To prevent a warning "test altered the execution environment" +def tearDownModule(): + asyncio._set_event_loop_policy(None) + + +def capture_test_stack(*, fut=None, depth=1): + + def walk(s): + ret = [ + (f"T<{n}>" if '-' not in (n := s.future.get_name()) else 'T') + if isinstance(s.future, asyncio.Task) else 'F' + ] + + ret.append( + [ + ( + f"s {entry.frame.f_code.co_name}" + if entry.frame.f_generator is None else + ( + f"a {entry.frame.f_generator.cr_code.co_name}" + if hasattr(entry.frame.f_generator, 'cr_code') else + f"ag {entry.frame.f_generator.ag_code.co_name}" + ) + ) for entry in s.call_stack + ] + ) + + ret.append( + sorted([ + walk(ab) for ab in s.awaited_by + ], key=lambda entry: entry[0]) + ) + + return ret + + buf = io.StringIO() + asyncio.print_call_graph(fut, file=buf, depth=depth+1) + + stack = asyncio.capture_call_graph(fut, depth=depth) + return walk(stack), buf.getvalue() + + +class CallStackTestBase: + + async def test_stack_tgroup(self): + + stack_for_c5 = None + + def c5(): + nonlocal stack_for_c5 + stack_for_c5 = capture_test_stack(depth=2) + + async def c4(): + await asyncio.sleep(0) + c5() + + async def c3(): + await c4() + + async def c2(): + await c3() + + async def c1(task): + await task + + async def main(): + async with asyncio.TaskGroup() as tg: + task = tg.create_task(c2(), name="c2_root") + tg.create_task(c1(task), name="sub_main_1") + tg.create_task(c1(task), name="sub_main_2") + + await main() + + self.assertEqual(stack_for_c5[0], [ + # task name + 'T', + # call stack + ['s c5', 'a c4', 'a c3', 'a c2'], + # awaited by + [ + ['T', + ['a _aexit', 'a __aexit__', 'a main', 'a test_stack_tgroup'], [] + ], + ['T', + ['a c1'], + [ + ['T', + ['a _aexit', 'a __aexit__', 'a main', 'a test_stack_tgroup'], [] + ] + ] + ], + ['T', + ['a c1'], + [ + ['T', + ['a _aexit', 'a __aexit__', 'a main', 'a test_stack_tgroup'], [] + ] + ] + ] + ] + ]) + + self.assertIn( + ' async CallStackTestBase.test_stack_tgroup()', + stack_for_c5[1]) + + + async def test_stack_async_gen(self): + + 
stack_for_gen_nested_call = None + + async def gen_nested_call(): + nonlocal stack_for_gen_nested_call + stack_for_gen_nested_call = capture_test_stack() + + async def gen(): + for num in range(2): + yield num + if num == 1: + await gen_nested_call() + + async def main(): + async for el in gen(): + pass + + await main() + + self.assertEqual(stack_for_gen_nested_call[0], [ + 'T', + [ + 's capture_test_stack', + 'a gen_nested_call', + 'ag gen', + 'a main', + 'a test_stack_async_gen' + ], + [] + ]) + + self.assertIn( + 'async generator CallStackTestBase.test_stack_async_gen.<locals>.gen()', + stack_for_gen_nested_call[1]) + + async def test_stack_gather(self): + + stack_for_deep = None + + async def deep(): + await asyncio.sleep(0) + nonlocal stack_for_deep + stack_for_deep = capture_test_stack() + + async def c1(): + await asyncio.sleep(0) + await deep() + + async def c2(): + await asyncio.sleep(0) + + async def main(): + await asyncio.gather(c1(), c2()) + + await main() + + self.assertEqual(stack_for_deep[0], [ + 'T', + ['s capture_test_stack', 'a deep', 'a c1'], + [ + ['T', ['a main', 'a test_stack_gather'], []] + ] + ]) + + async def test_stack_shield(self): + + stack_for_shield = None + + async def deep(): + await asyncio.sleep(0) + nonlocal stack_for_shield + stack_for_shield = capture_test_stack() + + async def c1(): + await asyncio.sleep(0) + await deep() + + async def main(): + await asyncio.shield(c1()) + + await main() + + self.assertEqual(stack_for_shield[0], [ + 'T', + ['s capture_test_stack', 'a deep', 'a c1'], + [ + ['T', ['a main', 'a test_stack_shield'], []] + ] + ]) + + async def test_stack_timeout(self): + + stack_for_inner = None + + async def inner(): + await asyncio.sleep(0) + nonlocal stack_for_inner + stack_for_inner = capture_test_stack() + + async def c1(): + async with asyncio.timeout(1): + await asyncio.sleep(0) + await inner() + + async def main(): + await asyncio.shield(c1()) + + await main() + + self.assertEqual(stack_for_inner[0], [ + 'T', + ['s capture_test_stack', 'a inner', 'a c1'], + [ + ['T', ['a main', 'a test_stack_timeout'], []] + ] + ]) + + async def test_stack_wait(self): + + stack_for_inner = None + + async def inner(): + await asyncio.sleep(0) + nonlocal stack_for_inner + stack_for_inner = capture_test_stack() + + async def c1(): + async with asyncio.timeout(1): + await asyncio.sleep(0) + await inner() + + async def c2(): + for i in range(3): + await asyncio.sleep(0) + + async def main(t1, t2): + while True: + _, pending = await asyncio.wait([t1, t2]) + if not pending: + break + + t1 = asyncio.create_task(c1()) + t2 = asyncio.create_task(c2()) + try: + await main(t1, t2) + finally: + await t1 + await t2 + + self.assertEqual(stack_for_inner[0], [ + 'T', + ['s capture_test_stack', 'a inner', 'a c1'], + [ + ['T', + ['a _wait', 'a wait', 'a main', 'a test_stack_wait'], + [] + ] + ] + ]) + + async def test_stack_task(self): + + stack_for_inner = None + + async def inner(): + await asyncio.sleep(0) + nonlocal stack_for_inner + stack_for_inner = capture_test_stack() + + async def c1(): + await inner() + + async def c2(): + await asyncio.create_task(c1(), name='there there') + + async def main(): + await c2() + + await main() + + self.assertEqual(stack_for_inner[0], [ + 'T', + ['s capture_test_stack', 'a inner', 'a c1'], + [['T', ['a c2', 'a main', 'a test_stack_task'], []]] + ]) + + async def test_stack_future(self): + + stack_for_fut = None + + async def a2(fut): + await fut + + async def a1(fut): + await a2(fut) + + async def b1(fut): + await fut + + async def 
main(): + nonlocal stack_for_fut + + fut = asyncio.Future() + async with asyncio.TaskGroup() as g: + g.create_task(a1(fut), name="task A") + g.create_task(b1(fut), name='task B') + + for _ in range(5): + # Do a few iterations to ensure that both a1 and b1 + # await on the future + await asyncio.sleep(0) + + stack_for_fut = capture_test_stack(fut=fut) + fut.set_result(None) + + await main() + + self.assertEqual(stack_for_fut[0], + ['F', + [], + [ + ['T', + ['a a2', 'a a1'], + [['T', ['a test_stack_future'], []]] + ], + ['T', + ['a b1'], + [['T', ['a test_stack_future'], []]] + ], + ]] + ) + + self.assertTrue(stack_for_fut[1].startswith('* Future(id=')) + + +@unittest.skipIf( + not hasattr(asyncio.futures, "_c_future_add_to_awaited_by"), + "C-accelerated asyncio call graph backend missing", +) +class TestCallStackC(CallStackTestBase, unittest.IsolatedAsyncioTestCase): + def setUp(self): + futures = asyncio.futures + tasks = asyncio.tasks + + self._Future = asyncio.Future + asyncio.Future = futures.Future = futures._CFuture + + self._Task = asyncio.Task + asyncio.Task = tasks.Task = tasks._CTask + + self._future_add_to_awaited_by = asyncio.future_add_to_awaited_by + futures.future_add_to_awaited_by = futures._c_future_add_to_awaited_by + asyncio.future_add_to_awaited_by = futures.future_add_to_awaited_by + + self._future_discard_from_awaited_by = asyncio.future_discard_from_awaited_by + futures.future_discard_from_awaited_by = futures._c_future_discard_from_awaited_by + asyncio.future_discard_from_awaited_by = futures.future_discard_from_awaited_by + + + def tearDown(self): + futures = asyncio.futures + tasks = asyncio.tasks + + futures.future_discard_from_awaited_by = self._future_discard_from_awaited_by + asyncio.future_discard_from_awaited_by = self._future_discard_from_awaited_by + del self._future_discard_from_awaited_by + + futures.future_add_to_awaited_by = self._future_add_to_awaited_by + asyncio.future_add_to_awaited_by = self._future_add_to_awaited_by + del self._future_add_to_awaited_by + + asyncio.Task = self._Task + tasks.Task = self._Task + del self._Task + + asyncio.Future = self._Future + futures.Future = self._Future + del self._Future + + +@unittest.skipIf( + not hasattr(asyncio.futures, "_py_future_add_to_awaited_by"), + "Pure Python asyncio call graph backend missing", +) +class TestCallStackPy(CallStackTestBase, unittest.IsolatedAsyncioTestCase): + def setUp(self): + futures = asyncio.futures + tasks = asyncio.tasks + + self._Future = asyncio.Future + asyncio.Future = futures.Future = futures._PyFuture + + self._Task = asyncio.Task + asyncio.Task = tasks.Task = tasks._PyTask + + self._future_add_to_awaited_by = asyncio.future_add_to_awaited_by + futures.future_add_to_awaited_by = futures._py_future_add_to_awaited_by + asyncio.future_add_to_awaited_by = futures.future_add_to_awaited_by + + self._future_discard_from_awaited_by = asyncio.future_discard_from_awaited_by + futures.future_discard_from_awaited_by = futures._py_future_discard_from_awaited_by + asyncio.future_discard_from_awaited_by = futures.future_discard_from_awaited_by + + + def tearDown(self): + futures = asyncio.futures + tasks = asyncio.tasks + + futures.future_discard_from_awaited_by = self._future_discard_from_awaited_by + asyncio.future_discard_from_awaited_by = self._future_discard_from_awaited_by + del self._future_discard_from_awaited_by + + futures.future_add_to_awaited_by = self._future_add_to_awaited_by + asyncio.future_add_to_awaited_by = self._future_add_to_awaited_by + del 
self._future_add_to_awaited_by + + asyncio.Task = self._Task + tasks.Task = self._Task + del self._Task + + asyncio.Future = self._Future + futures.Future = self._Future + del self._Future diff --git a/Lib/test/test_asyncio/test_locks.py b/Lib/test/test_asyncio/test_locks.py index aabfcd418829b2..3bb3e5c4ca0658 100644 --- a/Lib/test/test_asyncio/test_locks.py +++ b/Lib/test/test_asyncio/test_locks.py @@ -27,11 +27,11 @@ class LockTests(unittest.IsolatedAsyncioTestCase): async def test_repr(self): lock = asyncio.Lock() - self.assertTrue(repr(lock).endswith('[unlocked]>')) + self.assertEndsWith(repr(lock), '[unlocked]>') self.assertTrue(RGX_REPR.match(repr(lock))) await lock.acquire() - self.assertTrue(repr(lock).endswith('[locked]>')) + self.assertEndsWith(repr(lock), '[locked]>') self.assertTrue(RGX_REPR.match(repr(lock))) async def test_lock(self): @@ -286,12 +286,12 @@ class EventTests(unittest.IsolatedAsyncioTestCase): def test_repr(self): ev = asyncio.Event() - self.assertTrue(repr(ev).endswith('[unset]>')) + self.assertEndsWith(repr(ev), '[unset]>') match = RGX_REPR.match(repr(ev)) self.assertEqual(match.group('extras'), 'unset') ev.set() - self.assertTrue(repr(ev).endswith('[set]>')) + self.assertEndsWith(repr(ev), '[set]>') self.assertTrue(RGX_REPR.match(repr(ev))) ev._waiters.append(mock.Mock()) @@ -916,11 +916,11 @@ def test_initial_value_zero(self): async def test_repr(self): sem = asyncio.Semaphore() - self.assertTrue(repr(sem).endswith('[unlocked, value:1]>')) + self.assertEndsWith(repr(sem), '[unlocked, value:1]>') self.assertTrue(RGX_REPR.match(repr(sem))) await sem.acquire() - self.assertTrue(repr(sem).endswith('[locked]>')) + self.assertEndsWith(repr(sem), '[locked]>') self.assertTrue('waiters' not in repr(sem)) self.assertTrue(RGX_REPR.match(repr(sem))) diff --git a/Lib/test/test_asyncio/test_protocols.py b/Lib/test/test_asyncio/test_protocols.py index a8627b5b5b87f2..4484a031988533 100644 --- a/Lib/test/test_asyncio/test_protocols.py +++ b/Lib/test/test_asyncio/test_protocols.py @@ -19,7 +19,7 @@ def test_base_protocol(self): self.assertIsNone(p.connection_lost(f)) self.assertIsNone(p.pause_writing()) self.assertIsNone(p.resume_writing()) - self.assertFalse(hasattr(p, '__dict__')) + self.assertNotHasAttr(p, '__dict__') def test_protocol(self): f = mock.Mock() @@ -30,7 +30,7 @@ def test_protocol(self): self.assertIsNone(p.eof_received()) self.assertIsNone(p.pause_writing()) self.assertIsNone(p.resume_writing()) - self.assertFalse(hasattr(p, '__dict__')) + self.assertNotHasAttr(p, '__dict__') def test_buffered_protocol(self): f = mock.Mock() @@ -41,7 +41,7 @@ def test_buffered_protocol(self): self.assertIsNone(p.buffer_updated(150)) self.assertIsNone(p.pause_writing()) self.assertIsNone(p.resume_writing()) - self.assertFalse(hasattr(p, '__dict__')) + self.assertNotHasAttr(p, '__dict__') def test_datagram_protocol(self): f = mock.Mock() @@ -50,7 +50,7 @@ def test_datagram_protocol(self): self.assertIsNone(dp.connection_lost(f)) self.assertIsNone(dp.error_received(f)) self.assertIsNone(dp.datagram_received(f, f)) - self.assertFalse(hasattr(dp, '__dict__')) + self.assertNotHasAttr(dp, '__dict__') def test_subprocess_protocol(self): f = mock.Mock() @@ -60,7 +60,7 @@ def test_subprocess_protocol(self): self.assertIsNone(sp.pipe_data_received(1, f)) self.assertIsNone(sp.pipe_connection_lost(1, f)) self.assertIsNone(sp.process_exited()) - self.assertFalse(hasattr(sp, '__dict__')) + self.assertNotHasAttr(sp, '__dict__') if __name__ == '__main__': diff --git 
a/Lib/test/test_asyncio/test_queues.py b/Lib/test/test_asyncio/test_queues.py index 1a8d604faea1fd..090b9774c2289f 100644 --- a/Lib/test/test_asyncio/test_queues.py +++ b/Lib/test/test_asyncio/test_queues.py @@ -18,7 +18,7 @@ async def _test_repr_or_str(self, fn, expect_id): appear in fn(Queue()). """ q = asyncio.Queue() - self.assertTrue(fn(q).startswith(' None: + self.sock = sock + + def __getattr__(self, name): + return getattr(self.sock, name) + + def send(self, data): + # Fake that our write buffer is full, send only half + to_send = len(data)//2 + return self.sock.send(data[:to_send]) + + def _fake_full_write_buffer(data): + if socket_transport._read_ready_cb is None and not isinstance(socket_transport._sock, SocketWrapper): + socket_transport._sock = SocketWrapper(socket_transport._sock) + return unittest.mock.DEFAULT + + with unittest.mock.patch.object( + socket_transport, "write", + wraps=socket_transport.write, + side_effect=_fake_full_write_buffer + ): + await future + + writer.close() + await self.wait_closed(writer) + + def run(meth): + def wrapper(sock): + try: + meth(sock) + except Exception as ex: + self.loop.call_soon_threadsafe(future.set_exception, ex) + else: + self.loop.call_soon_threadsafe(future.set_result, None) + return wrapper + + with self.tcp_server(run(server)) as srv: + self.loop.run_until_complete(client(srv.addr)) + + with self.tcp_server(run(eof_server)) as srv: + self.loop.run_until_complete(client(srv.addr)) + def test_connect_timeout_warning(self): s = socket.socket(socket.AF_INET) s.bind(('127.0.0.1', 0)) diff --git a/Lib/test/test_asyncio/test_staggered.py b/Lib/test/test_asyncio/test_staggered.py index 3c81b629693596..ad34aa6da01f54 100644 --- a/Lib/test/test_asyncio/test_staggered.py +++ b/Lib/test/test_asyncio/test_staggered.py @@ -122,3 +122,30 @@ async def do_set(): self.assertIsNone(excs[0], None) self.assertIsInstance(excs[1], asyncio.CancelledError) self.assertIsInstance(excs[2], asyncio.CancelledError) + + + async def test_cancelled(self): + log = [] + with self.assertRaises(TimeoutError): + async with asyncio.timeout(None) as cs_outer, asyncio.timeout(None) as cs_inner: + async def coro_fn(): + cs_inner.reschedule(-1) + await asyncio.sleep(0) + try: + await asyncio.sleep(0) + except asyncio.CancelledError: + log.append("cancelled 1") + + cs_outer.reschedule(-1) + await asyncio.sleep(0) + try: + await asyncio.sleep(0) + except asyncio.CancelledError: + log.append("cancelled 2") + try: + await staggered_race([coro_fn], delay=None) + except asyncio.CancelledError: + log.append("cancelled 3") + raise + + self.assertListEqual(log, ["cancelled 1", "cancelled 2", "cancelled 3"]) diff --git a/Lib/test/test_asyncio/test_streams.py b/Lib/test/test_asyncio/test_streams.py index 047ada8c5d23df..673c6b46c647f3 100644 --- a/Lib/test/test_asyncio/test_streams.py +++ b/Lib/test/test_asyncio/test_streams.py @@ -50,7 +50,7 @@ def _basetest_open_connection(self, open_connection_fut): self.assertEqual(data, b'HTTP/1.0 200 OK\r\n') f = reader.read() data = self.loop.run_until_complete(f) - self.assertTrue(data.endswith(b'\r\n\r\nTest message')) + self.assertEndsWith(data, b'\r\n\r\nTest message') writer.close() self.assertEqual(messages, []) @@ -75,7 +75,7 @@ def _basetest_open_connection_no_loop_ssl(self, open_connection_fut): writer.write(b'GET / HTTP/1.0\r\n\r\n') f = reader.read() data = self.loop.run_until_complete(f) - self.assertTrue(data.endswith(b'\r\n\r\nTest message')) + self.assertEndsWith(data, b'\r\n\r\nTest message') writer.close() 
self.assertEqual(messages, []) @@ -1002,7 +1002,7 @@ def test_wait_closed_on_close(self): self.assertEqual(data, b'HTTP/1.0 200 OK\r\n') f = rd.read() data = self.loop.run_until_complete(f) - self.assertTrue(data.endswith(b'\r\n\r\nTest message')) + self.assertEndsWith(data, b'\r\n\r\nTest message') self.assertFalse(wr.is_closing()) wr.close() self.assertTrue(wr.is_closing()) @@ -1028,7 +1028,7 @@ async def inner(httpd): data = await rd.readline() self.assertEqual(data, b'HTTP/1.0 200 OK\r\n') data = await rd.read() - self.assertTrue(data.endswith(b'\r\n\r\nTest message')) + self.assertEndsWith(data, b'\r\n\r\nTest message') wr.close() await wr.wait_closed() @@ -1048,7 +1048,7 @@ async def inner(httpd): data = await rd.readline() self.assertEqual(data, b'HTTP/1.0 200 OK\r\n') data = await rd.read() - self.assertTrue(data.endswith(b'\r\n\r\nTest message')) + self.assertEndsWith(data, b'\r\n\r\nTest message') wr.close() with self.assertRaises(ConnectionResetError): wr.write(b'data') @@ -1089,12 +1089,12 @@ async def inner(httpd): data = await rd.readline() self.assertEqual(data, b'HTTP/1.0 200 OK\r\n') data = await rd.read() - self.assertTrue(data.endswith(b'\r\n\r\nTest message')) + self.assertEndsWith(data, b'\r\n\r\nTest message') with self.assertWarns(ResourceWarning) as cm: del wr gc.collect() self.assertEqual(len(cm.warnings), 1) - self.assertTrue(str(cm.warnings[0].message).startswith("unclosed 50 times - for _ in range(10 * ADAPTIVE_WARMUP_DELAY): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): assert_equal("overridden", f(num)) def test_setvectorcall_load_attr_specialization_skip(self): from _testcapi import function_setvectorcall + _testinternalcapi = import_helper.import_module("_testinternalcapi") class X: def __getattribute__(self, attr): @@ -824,11 +822,12 @@ def __getattribute__(self, attr): function_setvectorcall(X.__getattribute__) # make sure specialization doesn't trigger # when vectorcall is overridden - for _ in range(ADAPTIVE_WARMUP_DELAY): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): assert_equal("overridden", x.a) def test_setvectorcall_load_attr_specialization_deopt(self): from _testcapi import function_setvectorcall + _testinternalcapi = import_helper.import_module("_testinternalcapi") class X: def __getattribute__(self, attr): @@ -840,12 +839,12 @@ def get_a(x): assert_equal = self.assertEqual x = X() # trigger LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN specialization - for _ in range(ADAPTIVE_WARMUP_DELAY): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): assert_equal("a", get_a(x)) function_setvectorcall(X.__getattribute__) # make sure specialized LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN # gets deopted due to overridden vectorcall - for _ in range(ADAPTIVE_WARMUP_DELAY): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): assert_equal("overridden", get_a(x)) @requires_limited_api diff --git a/Lib/test/test_capi/test_abstract.py b/Lib/test/test_capi/test_abstract.py index 6a626813f23379..3de251bc5c241e 100644 --- a/Lib/test/test_capi/test_abstract.py +++ b/Lib/test/test_capi/test_abstract.py @@ -274,7 +274,7 @@ def test_object_setattr(self): # PyObject_SetAttr(obj, attr_name, NULL) removes the attribute xsetattr(obj, 'a', NULL) - self.assertFalse(hasattr(obj, 'a')) + self.assertNotHasAttr(obj, 'a') self.assertRaises(AttributeError, xsetattr, obj, 'b', NULL) self.assertRaises(RuntimeError, xsetattr, obj, 'evil', NULL) @@ -294,7 +294,7 @@ def test_object_setattrstring(self): # PyObject_SetAttrString(obj, attr_name, NULL) removes 
the attribute setattrstring(obj, b'a', NULL) - self.assertFalse(hasattr(obj, 'a')) + self.assertNotHasAttr(obj, 'a') self.assertRaises(AttributeError, setattrstring, obj, b'b', NULL) self.assertRaises(RuntimeError, setattrstring, obj, b'evil', NULL) @@ -311,10 +311,10 @@ def test_object_delattr(self): obj.a = 1 setattr(obj, '\U0001f40d', 2) xdelattr(obj, 'a') - self.assertFalse(hasattr(obj, 'a')) + self.assertNotHasAttr(obj, 'a') self.assertRaises(AttributeError, xdelattr, obj, 'b') xdelattr(obj, '\U0001f40d') - self.assertFalse(hasattr(obj, '\U0001f40d')) + self.assertNotHasAttr(obj, '\U0001f40d') self.assertRaises(AttributeError, xdelattr, 42, 'numerator') self.assertRaises(RuntimeError, xdelattr, obj, 'evil') @@ -328,10 +328,10 @@ def test_object_delattrstring(self): obj.a = 1 setattr(obj, '\U0001f40d', 2) delattrstring(obj, b'a') - self.assertFalse(hasattr(obj, 'a')) + self.assertNotHasAttr(obj, 'a') self.assertRaises(AttributeError, delattrstring, obj, b'b') delattrstring(obj, '\U0001f40d'.encode()) - self.assertFalse(hasattr(obj, '\U0001f40d')) + self.assertNotHasAttr(obj, '\U0001f40d') self.assertRaises(AttributeError, delattrstring, 42, b'numerator') self.assertRaises(RuntimeError, delattrstring, obj, b'evil') diff --git a/Lib/test/test_capi/test_codecs.py b/Lib/test/test_capi/test_codecs.py index a557e35e68915d..a0355c7a388c57 100644 --- a/Lib/test/test_capi/test_codecs.py +++ b/Lib/test/test_capi/test_codecs.py @@ -854,20 +854,18 @@ def test_codec_namereplace_errors_handler(self): self.do_test_codec_errors_handler(handler, self.unicode_encode_errors) def do_test_codec_errors_handler(self, handler, exceptions): - at_least_one = False + self.assertNotEqual(len(exceptions), 0) for exc in exceptions: - # See https://github.com/python/cpython/issues/123378 and related - # discussion and issues for details. - if self._exception_may_crash(exc): - continue - - at_least_one = True with self.subTest(handler=handler, exc=exc): # test that the handler does not crash - self.assertIsInstance(handler(exc), tuple) - - if exceptions: - self.assertTrue(at_least_one, "all exceptions are crashing") + res = handler(exc) + self.assertIsInstance(res, tuple) + self.assertEqual(len(res), 2) + replacement, continue_from = res + self.assertIsInstance(replacement, str) + self.assertIsInstance(continue_from, int) + self.assertGreaterEqual(continue_from, 0) + self.assertLessEqual(continue_from, len(exc.object)) for bad_exc in ( self.bad_unicode_errors @@ -876,30 +874,6 @@ def do_test_codec_errors_handler(self, handler, exceptions): with self.subTest('bad type', handler=handler, exc=bad_exc): self.assertRaises(TypeError, handler, bad_exc) - @classmethod - def _exception_may_crash(cls, exc): - """Indicate whether a Unicode exception might currently crash - the interpreter when used by a built-in codecs error handler. - - Until gh-123378 is fixed, we skip the tests for these exceptions. - - This should only be used by "do_test_codec_errors_handler". - """ - message, start, end = exc.object, exc.start, exc.end - match exc: - case UnicodeEncodeError(): - return end < start or (end - start) >= len(message) - case UnicodeDecodeError(): - # The case "end - start >= len(message)" does not crash. - return end < start - case UnicodeTranslateError(): - # Test "end <= start" because PyCodec_ReplaceErrors checks - # the Unicode kind of a 0-length string which by convention - # is PyUnicode_1BYTE_KIND and not PyUnicode_2BYTE_KIND as - # the handler currently expects. 
- return end <= start or (end - start) >= len(message) - return False - if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_capi/test_file.py b/Lib/test/test_capi/test_file.py new file mode 100644 index 00000000000000..b5767756992861 --- /dev/null +++ b/Lib/test/test_capi/test_file.py @@ -0,0 +1,305 @@ +import io +import os +import unittest +import warnings +from test import support +from test.support import import_helper, os_helper, warnings_helper + + +_testcapi = import_helper.import_module('_testcapi') +_testlimitedcapi = import_helper.import_module('_testlimitedcapi') +_io = import_helper.import_module('_io') +NULL = None +STDOUT_FD = 1 + +with open(__file__, 'rb') as fp: + FIRST_LINE = next(fp).decode() +FIRST_LINE_NORM = FIRST_LINE.rstrip() + '\n' + + +class CAPIFileTest(unittest.TestCase): + def test_pyfile_fromfd(self): + # Test PyFile_FromFd() which is a thin wrapper to _io.open() + pyfile_fromfd = _testlimitedcapi.pyfile_fromfd + filename = __file__ + with open(filename, "rb") as fp: + fd = fp.fileno() + + # FileIO + fp.seek(0) + obj = pyfile_fromfd(fd, filename, "rb", 0, NULL, NULL, NULL, 0) + try: + self.assertIsInstance(obj, _io.FileIO) + self.assertEqual(obj.readline(), FIRST_LINE.encode()) + finally: + obj.close() + + # BufferedReader + fp.seek(0) + obj = pyfile_fromfd(fd, filename, "rb", 1024, NULL, NULL, NULL, 0) + try: + self.assertIsInstance(obj, _io.BufferedReader) + self.assertEqual(obj.readline(), FIRST_LINE.encode()) + finally: + obj.close() + + # TextIOWrapper + fp.seek(0) + obj = pyfile_fromfd(fd, filename, "r", 1, + "utf-8", "replace", NULL, 0) + try: + self.assertIsInstance(obj, _io.TextIOWrapper) + self.assertEqual(obj.encoding, "utf-8") + self.assertEqual(obj.errors, "replace") + self.assertEqual(obj.readline(), FIRST_LINE_NORM) + finally: + obj.close() + + def test_pyfile_getline(self): + # Test PyFile_GetLine(file, n): call file.readline() + # and strip "\n" suffix if n < 0. + pyfile_getline = _testlimitedcapi.pyfile_getline + + # Test Unicode + with open(__file__, "r") as fp: + fp.seek(0) + self.assertEqual(pyfile_getline(fp, -1), + FIRST_LINE_NORM.rstrip('\n')) + fp.seek(0) + self.assertEqual(pyfile_getline(fp, 0), + FIRST_LINE_NORM) + fp.seek(0) + self.assertEqual(pyfile_getline(fp, 6), + FIRST_LINE_NORM[:6]) + + # Test bytes + with open(__file__, "rb") as fp: + fp.seek(0) + self.assertEqual(pyfile_getline(fp, -1), + FIRST_LINE.rstrip('\n').encode()) + fp.seek(0) + self.assertEqual(pyfile_getline(fp, 0), + FIRST_LINE.encode()) + fp.seek(0) + self.assertEqual(pyfile_getline(fp, 6), + FIRST_LINE.encode()[:6]) + + def test_pyfile_writestring(self): + # Test PyFile_WriteString(str, file): call file.write(str) + writestr = _testlimitedcapi.pyfile_writestring + + with io.StringIO() as fp: + self.assertEqual(writestr("a\xe9\u20ac\U0010FFFF".encode(), fp), 0) + with self.assertRaises(UnicodeDecodeError): + writestr(b"\xff", fp) + with self.assertRaises(UnicodeDecodeError): + writestr("\udc80".encode("utf-8", "surrogatepass"), fp) + + text = fp.getvalue() + self.assertEqual(text, "a\xe9\u20ac\U0010FFFF") + + with self.assertRaises(SystemError): + writestr(b"abc", NULL) + + def test_pyfile_writeobject(self): + # Test PyFile_WriteObject(obj, file, flags): + # - Call file.write(str(obj)) if flags equals Py_PRINT_RAW. + # - Call file.write(repr(obj)) otherwise. 
+ writeobject = _testlimitedcapi.pyfile_writeobject + Py_PRINT_RAW = 1 + + with io.StringIO() as fp: + # Test flags=Py_PRINT_RAW + self.assertEqual(writeobject("raw", fp, Py_PRINT_RAW), 0) + writeobject(NULL, fp, Py_PRINT_RAW) + + # Test flags=0 + self.assertEqual(writeobject("repr", fp, 0), 0) + writeobject(NULL, fp, 0) + + text = fp.getvalue() + self.assertEqual(text, "raw'repr'") + + # invalid file type + for invalid_file in (123, "abc", object()): + with self.subTest(file=invalid_file): + with self.assertRaises(AttributeError): + writeobject("abc", invalid_file, Py_PRINT_RAW) + + with self.assertRaises(TypeError): + writeobject("abc", NULL, 0) + + def test_pyobject_asfiledescriptor(self): + # Test PyObject_AsFileDescriptor(obj): + # - Return obj if obj is an integer. + # - Return obj.fileno() otherwise. + # File descriptor must be >= 0. + asfd = _testlimitedcapi.pyobject_asfiledescriptor + + self.assertEqual(asfd(123), 123) + self.assertEqual(asfd(0), 0) + + with open(__file__, "rb") as fp: + self.assertEqual(asfd(fp), fp.fileno()) + + # bool emits RuntimeWarning + msg = r"bool is used as a file descriptor" + with warnings_helper.check_warnings((msg, RuntimeWarning)): + self.assertEqual(asfd(True), 1) + + class FakeFile: + def __init__(self, fd): + self.fd = fd + def fileno(self): + return self.fd + + # file descriptor must be positive + with self.assertRaises(ValueError): + asfd(-1) + with self.assertRaises(ValueError): + asfd(FakeFile(-1)) + + # fileno() result must be an integer + with self.assertRaises(TypeError): + asfd(FakeFile("text")) + + # unsupported types + for obj in ("string", ["list"], object()): + with self.subTest(obj=obj): + with self.assertRaises(TypeError): + asfd(obj) + + # CRASHES asfd(NULL) + + def test_pyfile_newstdprinter(self): + # Test PyFile_NewStdPrinter() + pyfile_newstdprinter = _testcapi.pyfile_newstdprinter + + file = pyfile_newstdprinter(STDOUT_FD) + self.assertEqual(file.closed, False) + self.assertIsNone(file.encoding) + self.assertEqual(file.mode, "w") + + self.assertEqual(file.fileno(), STDOUT_FD) + self.assertEqual(file.isatty(), os.isatty(STDOUT_FD)) + + # flush() is a no-op + self.assertIsNone(file.flush()) + + # close() is a no-op + self.assertIsNone(file.close()) + self.assertEqual(file.closed, False) + + support.check_disallow_instantiation(self, type(file)) + + def test_pyfile_newstdprinter_write(self): + # Test the write() method of PyFile_NewStdPrinter() + pyfile_newstdprinter = _testcapi.pyfile_newstdprinter + + filename = os_helper.TESTFN + self.addCleanup(os_helper.unlink, filename) + + try: + old_stdout = os.dup(STDOUT_FD) + except OSError as exc: + # os.dup(STDOUT_FD) is not supported on WASI + self.skipTest(f"os.dup() failed with {exc!r}") + + try: + with open(filename, "wb") as fp: + # PyFile_NewStdPrinter() only accepts fileno(stdout) + # or fileno(stderr) file descriptor. 
+ fd = fp.fileno() + os.dup2(fd, STDOUT_FD) + + file = pyfile_newstdprinter(STDOUT_FD) + self.assertEqual(file.write("text"), 4) + # The surrogate character is encoded with + # the "surrogateescape" error handler + self.assertEqual(file.write("[\udc80]"), 8) + finally: + os.dup2(old_stdout, STDOUT_FD) + os.close(old_stdout) + + with open(filename, "r") as fp: + self.assertEqual(fp.read(), "text[\\udc80]") + + def test_py_fopen(self): + # Test Py_fopen() and Py_fclose() + py_fopen = _testcapi.py_fopen + + with open(__file__, "rb") as fp: + source = fp.read() + + for filename in (__file__, os.fsencode(__file__)): + with self.subTest(filename=filename): + data = py_fopen(filename, "rb") + self.assertEqual(data, source[:256]) + + data = py_fopen(os_helper.FakePath(filename), "rb") + self.assertEqual(data, source[:256]) + + filenames = [ + os_helper.TESTFN, + os.fsencode(os_helper.TESTFN), + ] + if os_helper.TESTFN_UNDECODABLE is not None: + filenames.append(os_helper.TESTFN_UNDECODABLE) + filenames.append(os.fsdecode(os_helper.TESTFN_UNDECODABLE)) + if os_helper.TESTFN_UNENCODABLE is not None: + filenames.append(os_helper.TESTFN_UNENCODABLE) + for filename in filenames: + with self.subTest(filename=filename): + try: + with open(filename, "wb") as fp: + fp.write(source) + except OSError: + # TESTFN_UNDECODABLE cannot be used to create a file + # on macOS/WASI. + filename = None + continue + try: + data = py_fopen(filename, "rb") + self.assertEqual(data, source[:256]) + finally: + os_helper.unlink(filename) + + # embedded null character/byte in the filename + with self.assertRaises(ValueError): + py_fopen("a\x00b", "rb") + with self.assertRaises(ValueError): + py_fopen(b"a\x00b", "rb") + + # non-ASCII mode failing with "Invalid argument" + with self.assertRaises(OSError): + py_fopen(__file__, b"\xc2\x80") + with self.assertRaises(OSError): + # \x98 is invalid in cp1250, cp1251, cp1257 + # \x9d is invalid in cp1252-cp1255, cp1258 + py_fopen(__file__, b"\xc2\x98\xc2\x9d") + # UnicodeDecodeError can come from the audit hook code + with self.assertRaises((UnicodeDecodeError, OSError)): + py_fopen(__file__, b"\x98\x9d") + + # invalid filename type + for invalid_type in (123, object()): + with self.subTest(filename=invalid_type): + with self.assertRaises(TypeError): + py_fopen(invalid_type, "rb") + + if support.MS_WINDOWS: + with self.assertRaises(OSError): + # On Windows, the file mode is limited to 10 characters + py_fopen(__file__, "rt+, ccs=UTF-8") + + # CRASHES py_fopen(NULL, 'rb') + # CRASHES py_fopen(__file__, NULL) + + # TODO: Test Py_UniversalNewlineFgets() + + # PyFile_SetOpenCodeHook() and PyFile_OpenCode() are tested by + # test_embed.test_open_code_hook() + + +if __name__ == "__main__": + unittest.main() diff --git a/Lib/test/test_capi/test_frame.py b/Lib/test/test_capi/test_frame.py new file mode 100644 index 00000000000000..23cb8e3dada9d4 --- /dev/null +++ b/Lib/test/test_capi/test_frame.py @@ -0,0 +1,56 @@ +import sys +import unittest +from test.support import import_helper + + +_testcapi = import_helper.import_module('_testcapi') + + +class FrameTest(unittest.TestCase): + def getframe(self): + return sys._getframe() + + def test_frame_getters(self): + frame = self.getframe() + self.assertEqual(frame.f_locals, _testcapi.frame_getlocals(frame)) + self.assertIs(frame.f_globals, _testcapi.frame_getglobals(frame)) + self.assertIs(frame.f_builtins, _testcapi.frame_getbuiltins(frame)) + self.assertEqual(frame.f_lasti, _testcapi.frame_getlasti(frame)) + + def test_getvar(self): + 
current_frame = sys._getframe() + x = 1 + self.assertEqual(_testcapi.frame_getvar(current_frame, "x"), 1) + self.assertEqual(_testcapi.frame_getvarstring(current_frame, b"x"), 1) + with self.assertRaises(NameError): + _testcapi.frame_getvar(current_frame, "y") + with self.assertRaises(NameError): + _testcapi.frame_getvarstring(current_frame, b"y") + + # wrong name type + with self.assertRaises(TypeError): + _testcapi.frame_getvar(current_frame, b'x') + with self.assertRaises(TypeError): + _testcapi.frame_getvar(current_frame, 123) + + def getgenframe(self): + yield sys._getframe() + + def test_frame_get_generator(self): + gen = self.getgenframe() + frame = next(gen) + self.assertIs(gen, _testcapi.frame_getgenerator(frame)) + + def test_frame_fback_api(self): + """Test that accessing `f_back` does not cause a segmentation fault on + a frame created with `PyFrame_New` (GH-99110).""" + def dummy(): + pass + + frame = _testcapi.frame_new(dummy.__code__, globals(), locals()) + # The following line should not cause a segmentation fault. + self.assertIsNone(frame.f_back) + + +if __name__ == "__main__": + unittest.main() diff --git a/Lib/test/test_capi/test_function.py b/Lib/test/test_capi/test_function.py new file mode 100644 index 00000000000000..9dca377e28ba42 --- /dev/null +++ b/Lib/test/test_capi/test_function.py @@ -0,0 +1,323 @@ +import unittest +from test.support import import_helper + + +_testcapi = import_helper.import_module('_testcapi') + + +class FunctionTest(unittest.TestCase): + def test_function_get_code(self): + # Test PyFunction_GetCode() + import types + + def some(): + pass + + code = _testcapi.function_get_code(some) + self.assertIsInstance(code, types.CodeType) + self.assertEqual(code, some.__code__) + + with self.assertRaises(SystemError): + _testcapi.function_get_code(None) # not a function + + def test_function_get_globals(self): + # Test PyFunction_GetGlobals() + def some(): + pass + + globals_ = _testcapi.function_get_globals(some) + self.assertIsInstance(globals_, dict) + self.assertEqual(globals_, some.__globals__) + + with self.assertRaises(SystemError): + _testcapi.function_get_globals(None) # not a function + + def test_function_get_module(self): + # Test PyFunction_GetModule() + def some(): + pass + + module = _testcapi.function_get_module(some) + self.assertIsInstance(module, str) + self.assertEqual(module, some.__module__) + + with self.assertRaises(SystemError): + _testcapi.function_get_module(None) # not a function + + def test_function_get_defaults(self): + # Test PyFunction_GetDefaults() + def some( + pos_only1, pos_only2='p', + /, + zero=0, optional=None, + *, + kw1, + kw2=True, + ): + pass + + defaults = _testcapi.function_get_defaults(some) + self.assertEqual(defaults, ('p', 0, None)) + self.assertEqual(defaults, some.__defaults__) + + with self.assertRaises(SystemError): + _testcapi.function_get_defaults(None) # not a function + + def test_function_set_defaults(self): + # Test PyFunction_SetDefaults() + def some( + pos_only1, pos_only2='p', + /, + zero=0, optional=None, + *, + kw1, + kw2=True, + ): + pass + + old_defaults = ('p', 0, None) + self.assertEqual(_testcapi.function_get_defaults(some), old_defaults) + self.assertEqual(some.__defaults__, old_defaults) + + with self.assertRaises(SystemError): + _testcapi.function_set_defaults(some, 1) # not tuple or None + self.assertEqual(_testcapi.function_get_defaults(some), old_defaults) + self.assertEqual(some.__defaults__, old_defaults) + + with self.assertRaises(SystemError): + 
_testcapi.function_set_defaults(1, ()) # not a function + self.assertEqual(_testcapi.function_get_defaults(some), old_defaults) + self.assertEqual(some.__defaults__, old_defaults) + + new_defaults = ('q', 1, None) + _testcapi.function_set_defaults(some, new_defaults) + self.assertEqual(_testcapi.function_get_defaults(some), new_defaults) + self.assertEqual(some.__defaults__, new_defaults) + + # Empty tuple is fine: + new_defaults = () + _testcapi.function_set_defaults(some, new_defaults) + self.assertEqual(_testcapi.function_get_defaults(some), new_defaults) + self.assertEqual(some.__defaults__, new_defaults) + + class tuplesub(tuple): ... # tuple subclasses must work + + new_defaults = tuplesub(((1, 2), ['a', 'b'], None)) + _testcapi.function_set_defaults(some, new_defaults) + self.assertEqual(_testcapi.function_get_defaults(some), new_defaults) + self.assertEqual(some.__defaults__, new_defaults) + + # `None` is special, it sets `defaults` to `NULL`, + # it needs special handling in `_testcapi`: + _testcapi.function_set_defaults(some, None) + self.assertEqual(_testcapi.function_get_defaults(some), None) + self.assertEqual(some.__defaults__, None) + + def test_function_get_kw_defaults(self): + # Test PyFunction_GetKwDefaults() + def some( + pos_only1, pos_only2='p', + /, + zero=0, optional=None, + *, + kw1, + kw2=True, + ): + pass + + defaults = _testcapi.function_get_kw_defaults(some) + self.assertEqual(defaults, {'kw2': True}) + self.assertEqual(defaults, some.__kwdefaults__) + + with self.assertRaises(SystemError): + _testcapi.function_get_kw_defaults(None) # not a function + + def test_function_set_kw_defaults(self): + # Test PyFunction_SetKwDefaults() + def some( + pos_only1, pos_only2='p', + /, + zero=0, optional=None, + *, + kw1, + kw2=True, + ): + pass + + old_defaults = {'kw2': True} + self.assertEqual(_testcapi.function_get_kw_defaults(some), old_defaults) + self.assertEqual(some.__kwdefaults__, old_defaults) + + with self.assertRaises(SystemError): + _testcapi.function_set_kw_defaults(some, 1) # not dict or None + self.assertEqual(_testcapi.function_get_kw_defaults(some), old_defaults) + self.assertEqual(some.__kwdefaults__, old_defaults) + + with self.assertRaises(SystemError): + _testcapi.function_set_kw_defaults(1, {}) # not a function + self.assertEqual(_testcapi.function_get_kw_defaults(some), old_defaults) + self.assertEqual(some.__kwdefaults__, old_defaults) + + new_defaults = {'kw2': (1, 2, 3)} + _testcapi.function_set_kw_defaults(some, new_defaults) + self.assertEqual(_testcapi.function_get_kw_defaults(some), new_defaults) + self.assertEqual(some.__kwdefaults__, new_defaults) + + # Empty dict is fine: + new_defaults = {} + _testcapi.function_set_kw_defaults(some, new_defaults) + self.assertEqual(_testcapi.function_get_kw_defaults(some), new_defaults) + self.assertEqual(some.__kwdefaults__, new_defaults) + + class dictsub(dict): ... 
# dict subclasses must work + + new_defaults = dictsub({'kw2': None}) + _testcapi.function_set_kw_defaults(some, new_defaults) + self.assertEqual(_testcapi.function_get_kw_defaults(some), new_defaults) + self.assertEqual(some.__kwdefaults__, new_defaults) + + # `None` is special, it sets `kwdefaults` to `NULL`, + # it needs special handling in `_testcapi`: + _testcapi.function_set_kw_defaults(some, None) + self.assertEqual(_testcapi.function_get_kw_defaults(some), None) + self.assertEqual(some.__kwdefaults__, None) + + def test_function_get_closure(self): + # Test PyFunction_GetClosure() + from types import CellType + + def regular_function(): ... + def unused_one_level(arg1): + def inner(arg2, arg3): ... + return inner + def unused_two_levels(arg1, arg2): + def decorator(arg3, arg4): + def inner(arg5, arg6): ... + return inner + return decorator + def with_one_level(arg1): + def inner(arg2, arg3): + return arg1 + arg2 + arg3 + return inner + def with_two_levels(arg1, arg2): + def decorator(arg3, arg4): + def inner(arg5, arg6): + return arg1 + arg2 + arg3 + arg4 + arg5 + arg6 + return inner + return decorator + + # Functions without closures: + self.assertIsNone(_testcapi.function_get_closure(regular_function)) + self.assertIsNone(regular_function.__closure__) + + func = unused_one_level(1) + closure = _testcapi.function_get_closure(func) + self.assertIsNone(closure) + self.assertIsNone(func.__closure__) + + func = unused_two_levels(1, 2)(3, 4) + closure = _testcapi.function_get_closure(func) + self.assertIsNone(closure) + self.assertIsNone(func.__closure__) + + # Functions with closures: + func = with_one_level(5) + closure = _testcapi.function_get_closure(func) + self.assertEqual(closure, func.__closure__) + self.assertIsInstance(closure, tuple) + self.assertEqual(len(closure), 1) + self.assertEqual(len(closure), len(func.__code__.co_freevars)) + for cell in closure: + self.assertIsInstance(cell, CellType) + self.assertTrue(closure[0].cell_contents, 5) + + func = with_two_levels(1, 2)(3, 4) + closure = _testcapi.function_get_closure(func) + self.assertEqual(closure, func.__closure__) + self.assertIsInstance(closure, tuple) + self.assertEqual(len(closure), 4) + self.assertEqual(len(closure), len(func.__code__.co_freevars)) + for cell in closure: + self.assertIsInstance(cell, CellType) + self.assertEqual([cell.cell_contents for cell in closure], + [1, 2, 3, 4]) + + def test_function_get_closure_error(self): + # Test PyFunction_GetClosure() + with self.assertRaises(SystemError): + _testcapi.function_get_closure(1) + with self.assertRaises(SystemError): + _testcapi.function_get_closure(None) + + def test_function_set_closure(self): + # Test PyFunction_SetClosure() + from types import CellType + + def function_without_closure(): ... 
+ def function_with_closure(arg): + def inner(): + return arg + return inner + + func = function_without_closure + _testcapi.function_set_closure(func, (CellType(1), CellType(1))) + closure = _testcapi.function_get_closure(func) + self.assertEqual([c.cell_contents for c in closure], [1, 1]) + self.assertEqual([c.cell_contents for c in func.__closure__], [1, 1]) + + func = function_with_closure(1) + _testcapi.function_set_closure(func, + (CellType(1), CellType(2), CellType(3))) + closure = _testcapi.function_get_closure(func) + self.assertEqual([c.cell_contents for c in closure], [1, 2, 3]) + self.assertEqual([c.cell_contents for c in func.__closure__], [1, 2, 3]) + + def test_function_set_closure_none(self): + # Test PyFunction_SetClosure() + def function_without_closure(): ... + def function_with_closure(arg): + def inner(): + return arg + return inner + + _testcapi.function_set_closure(function_without_closure, None) + self.assertIsNone( + _testcapi.function_get_closure(function_without_closure)) + self.assertIsNone(function_without_closure.__closure__) + + _testcapi.function_set_closure(function_with_closure, None) + self.assertIsNone( + _testcapi.function_get_closure(function_with_closure)) + self.assertIsNone(function_with_closure.__closure__) + + def test_function_set_closure_errors(self): + # Test PyFunction_SetClosure() + def function_without_closure(): ... + + with self.assertRaises(SystemError): + _testcapi.function_set_closure(None, ()) # not a function + + with self.assertRaises(SystemError): + _testcapi.function_set_closure(function_without_closure, 1) + self.assertIsNone(function_without_closure.__closure__) # no change + + # NOTE: this works, but goes against the docs: + _testcapi.function_set_closure(function_without_closure, (1, 2)) + self.assertEqual( + _testcapi.function_get_closure(function_without_closure), (1, 2)) + self.assertEqual(function_without_closure.__closure__, (1, 2)) + + # TODO: test PyFunction_New() + # TODO: test PyFunction_NewWithQualName() + # TODO: test PyFunction_SetVectorcall() + # TODO: test PyFunction_GetAnnotations() + # TODO: test PyFunction_SetAnnotations() + # TODO: test PyClassMethod_New() + # TODO: test PyStaticMethod_New() + # + # PyFunction_AddWatcher() and PyFunction_ClearWatcher() are tested by + # test_capi.test_watchers. 
+ + +if __name__ == "__main__": + unittest.main() diff --git a/Lib/test/test_capi/test_immortal.py b/Lib/test/test_capi/test_immortal.py index 3e36913ac301c3..660e8a0e789366 100644 --- a/Lib/test/test_capi/test_immortal.py +++ b/Lib/test/test_capi/test_immortal.py @@ -5,12 +5,22 @@ _testinternalcapi = import_helper.import_module('_testinternalcapi') -class TestCAPI(unittest.TestCase): - def test_immortal_builtins(self): - _testcapi.test_immortal_builtins() +class TestUnstableCAPI(unittest.TestCase): + def test_immortal(self): + # Not extensive + known_immortals = (True, False, None, 0, ()) + for immortal in known_immortals: + with self.subTest(immortal=immortal): + self.assertTrue(_testcapi.is_immortal(immortal)) + + # Some arbitrary mutable objects + non_immortals = (object(), self, [object()]) + for non_immortal in non_immortals: + with self.subTest(non_immortal=non_immortal): + self.assertFalse(_testcapi.is_immortal(non_immortal)) + + # CRASHES _testcapi.is_immortal(NULL) - def test_immortal_small_ints(self): - _testcapi.test_immortal_small_ints() class TestInternalCAPI(unittest.TestCase): diff --git a/Lib/test/test_capi/test_import.py b/Lib/test/test_capi/test_import.py new file mode 100644 index 00000000000000..25136624ca4ed9 --- /dev/null +++ b/Lib/test/test_capi/test_import.py @@ -0,0 +1,381 @@ +import importlib.util +import os.path +import sys +import types +import unittest +from test.support import os_helper +from test.support import import_helper +from test.support.warnings_helper import check_warnings + +_testcapi = import_helper.import_module('_testcapi') +_testlimitedcapi = import_helper.import_module('_testlimitedcapi') +NULL = None + + +class ImportTests(unittest.TestCase): + def test_getmagicnumber(self): + # Test PyImport_GetMagicNumber() + magic = _testlimitedcapi.PyImport_GetMagicNumber() + self.assertEqual(magic, + int.from_bytes(importlib.util.MAGIC_NUMBER, 'little')) + + def test_getmagictag(self): + # Test PyImport_GetMagicTag() + tag = _testlimitedcapi.PyImport_GetMagicTag() + self.assertEqual(tag, sys.implementation.cache_tag) + + def test_getmoduledict(self): + # Test PyImport_GetModuleDict() + modules = _testlimitedcapi.PyImport_GetModuleDict() + self.assertIs(modules, sys.modules) + + def check_import_loaded_module(self, import_module): + for name in ('os', 'sys', 'test', 'unittest'): + with self.subTest(name=name): + self.assertIn(name, sys.modules) + old_module = sys.modules[name] + module = import_module(name) + self.assertIsInstance(module, types.ModuleType) + self.assertIs(module, old_module) + + def check_import_fresh_module(self, import_module): + old_modules = dict(sys.modules) + try: + for name in ('colorsys', 'math'): + with self.subTest(name=name): + sys.modules.pop(name, None) + module = import_module(name) + self.assertIsInstance(module, types.ModuleType) + self.assertIs(module, sys.modules[name]) + self.assertEqual(module.__name__, name) + finally: + sys.modules.clear() + sys.modules.update(old_modules) + + def test_getmodule(self): + # Test PyImport_GetModule() + getmodule = _testlimitedcapi.PyImport_GetModule + self.check_import_loaded_module(getmodule) + + nonexistent = 'nonexistent' + self.assertNotIn(nonexistent, sys.modules) + self.assertIs(getmodule(nonexistent), KeyError) + self.assertIs(getmodule(''), KeyError) + self.assertIs(getmodule(object()), KeyError) + + self.assertRaises(TypeError, getmodule, []) # unhashable + # CRASHES getmodule(NULL) + + def check_addmodule(self, add_module, accept_nonstr=False): + # create a new module + names 
= ['nonexistent'] + if accept_nonstr: + names.append(b'\xff') # non-UTF-8 + # PyImport_AddModuleObject() accepts non-string names + names.append(tuple(['hashable non-string'])) + for name in names: + with self.subTest(name=name): + self.assertNotIn(name, sys.modules) + try: + module = add_module(name) + self.assertIsInstance(module, types.ModuleType) + self.assertEqual(module.__name__, name) + self.assertIs(module, sys.modules[name]) + finally: + sys.modules.pop(name, None) + + # get an existing module + self.check_import_loaded_module(add_module) + + def test_addmoduleobject(self): + # Test PyImport_AddModuleObject() + addmoduleobject = _testlimitedcapi.PyImport_AddModuleObject + self.check_addmodule(addmoduleobject, accept_nonstr=True) + + self.assertRaises(TypeError, addmoduleobject, []) # unhashable + # CRASHES addmoduleobject(NULL) + + def test_addmodule(self): + # Test PyImport_AddModule() + addmodule = _testlimitedcapi.PyImport_AddModule + self.check_addmodule(addmodule) + + self.assertRaises(UnicodeDecodeError, addmodule, b'\xff') + # CRASHES addmodule(NULL) + + def test_addmoduleref(self): + # Test PyImport_AddModuleRef() + addmoduleref = _testlimitedcapi.PyImport_AddModuleRef + self.check_addmodule(addmoduleref) + + self.assertRaises(UnicodeDecodeError, addmoduleref, b'\xff') + # CRASHES addmoduleref(NULL) + + def check_import_func(self, import_module): + self.check_import_loaded_module(import_module) + self.check_import_fresh_module(import_module) + self.assertRaises(ModuleNotFoundError, import_module, 'nonexistent') + self.assertRaises(ValueError, import_module, '') + + def test_import(self): + # Test PyImport_Import() + import_ = _testlimitedcapi.PyImport_Import + self.check_import_func(import_) + + self.assertRaises(TypeError, import_, b'os') + self.assertRaises(SystemError, import_, NULL) + + def test_importmodule(self): + # Test PyImport_ImportModule() + importmodule = _testlimitedcapi.PyImport_ImportModule + self.check_import_func(importmodule) + + self.assertRaises(UnicodeDecodeError, importmodule, b'\xff') + # CRASHES importmodule(NULL) + + def test_importmodulenoblock(self): + # Test deprecated PyImport_ImportModuleNoBlock() + importmodulenoblock = _testlimitedcapi.PyImport_ImportModuleNoBlock + with check_warnings(('', DeprecationWarning)): + self.check_import_func(importmodulenoblock) + self.assertRaises(UnicodeDecodeError, importmodulenoblock, b'\xff') + + # CRASHES importmodulenoblock(NULL) + + def check_frozen_import(self, import_frozen_module): + # Importing a frozen module executes its code, so start by unloading + # the module to execute the code in a new (temporary) module. 
+ old_zipimport = sys.modules.pop('zipimport') + try: + self.assertEqual(import_frozen_module('zipimport'), 1) + + # import zipimport again + self.assertEqual(import_frozen_module('zipimport'), 1) + finally: + sys.modules['zipimport'] = old_zipimport + + # not a frozen module + self.assertEqual(import_frozen_module('sys'), 0) + self.assertEqual(import_frozen_module('nonexistent'), 0) + self.assertEqual(import_frozen_module(''), 0) + + def test_importfrozenmodule(self): + # Test PyImport_ImportFrozenModule() + importfrozenmodule = _testlimitedcapi.PyImport_ImportFrozenModule + self.check_frozen_import(importfrozenmodule) + + self.assertRaises(UnicodeDecodeError, importfrozenmodule, b'\xff') + # CRASHES importfrozenmodule(NULL) + + def test_importfrozenmoduleobject(self): + # Test PyImport_ImportFrozenModuleObject() + importfrozenmoduleobject = _testlimitedcapi.PyImport_ImportFrozenModuleObject + self.check_frozen_import(importfrozenmoduleobject) + self.assertEqual(importfrozenmoduleobject(b'zipimport'), 0) + self.assertEqual(importfrozenmoduleobject(NULL), 0) + + def test_importmoduleex(self): + # Test PyImport_ImportModuleEx() + importmoduleex = _testlimitedcapi.PyImport_ImportModuleEx + self.check_import_func(lambda name: importmoduleex(name, NULL, NULL, NULL)) + + self.assertRaises(ModuleNotFoundError, importmoduleex, 'nonexistent', NULL, NULL, NULL) + self.assertRaises(ValueError, importmoduleex, '', NULL, NULL, NULL) + self.assertRaises(UnicodeDecodeError, importmoduleex, b'\xff', NULL, NULL, NULL) + # CRASHES importmoduleex(NULL, NULL, NULL, NULL) + + def check_importmodulelevel(self, importmodulelevel): + self.check_import_func(lambda name: importmodulelevel(name, NULL, NULL, NULL, 0)) + + self.assertRaises(ModuleNotFoundError, importmodulelevel, 'nonexistent', NULL, NULL, NULL, 0) + self.assertRaises(ValueError, importmodulelevel, '', NULL, NULL, NULL, 0) + + if __package__: + self.assertIs(importmodulelevel('test_import', globals(), NULL, NULL, 1), + sys.modules['test.test_capi.test_import']) + self.assertIs(importmodulelevel('test_capi', globals(), NULL, NULL, 2), + sys.modules['test.test_capi']) + self.assertRaises(ValueError, importmodulelevel, 'os', NULL, NULL, NULL, -1) + with self.assertWarns(ImportWarning): + self.assertRaises(KeyError, importmodulelevel, 'test_import', {}, NULL, NULL, 1) + self.assertRaises(TypeError, importmodulelevel, 'test_import', [], NULL, NULL, 1) + + def test_importmodulelevel(self): + # Test PyImport_ImportModuleLevel() + importmodulelevel = _testlimitedcapi.PyImport_ImportModuleLevel + self.check_importmodulelevel(importmodulelevel) + + self.assertRaises(UnicodeDecodeError, importmodulelevel, b'\xff', NULL, NULL, NULL, 0) + # CRASHES importmodulelevel(NULL, NULL, NULL, NULL, 0) + + def test_importmodulelevelobject(self): + # Test PyImport_ImportModuleLevelObject() + importmodulelevel = _testlimitedcapi.PyImport_ImportModuleLevelObject + self.check_importmodulelevel(importmodulelevel) + + self.assertRaises(TypeError, importmodulelevel, b'os', NULL, NULL, NULL, 0) + self.assertRaises(ValueError, importmodulelevel, NULL, NULL, NULL, NULL, 0) + + def check_executecodemodule(self, execute_code, *args): + name = 'test_import_executecode' + try: + # Create a temporary module where the code will be executed + self.assertNotIn(name, sys.modules) + module = _testlimitedcapi.PyImport_AddModuleRef(name) + self.assertNotHasAttr(module, 'attr') + + # Execute the code + code = compile('attr = 1', '', 'exec') + module2 = execute_code(name, code, *args) + 
self.assertIs(module2, module) + + # Check the function side effects + self.assertEqual(module.attr, 1) + finally: + sys.modules.pop(name, None) + return module.__spec__.origin + + def test_executecodemodule(self): + # Test PyImport_ExecCodeModule() + execcodemodule = _testlimitedcapi.PyImport_ExecCodeModule + self.check_executecodemodule(execcodemodule) + + code = compile('attr = 1', '', 'exec') + self.assertRaises(UnicodeDecodeError, execcodemodule, b'\xff', code) + # CRASHES execcodemodule(NULL, code) + # CRASHES execcodemodule(name, NULL) + + def test_executecodemoduleex(self): + # Test PyImport_ExecCodeModuleEx() + execcodemoduleex = _testlimitedcapi.PyImport_ExecCodeModuleEx + + # Test NULL path (it should not crash) + self.check_executecodemodule(execcodemoduleex, NULL) + + # Test non-NULL path + pathname = b'pathname' + origin = self.check_executecodemodule(execcodemoduleex, pathname) + self.assertEqual(origin, os.path.abspath(os.fsdecode(pathname))) + + pathname = os_helper.TESTFN_UNDECODABLE + if pathname: + origin = self.check_executecodemodule(execcodemoduleex, pathname) + self.assertEqual(origin, os.path.abspath(os.fsdecode(pathname))) + + code = compile('attr = 1', '', 'exec') + self.assertRaises(UnicodeDecodeError, execcodemoduleex, b'\xff', code, NULL) + # CRASHES execcodemoduleex(NULL, code, NULL) + # CRASHES execcodemoduleex(name, NULL, NULL) + + def check_executecode_pathnames(self, execute_code_func, object=False): + # Test non-NULL pathname and NULL cpathname + + # Test NULL paths (it should not crash) + self.check_executecodemodule(execute_code_func, NULL, NULL) + + pathname = 'pathname' + origin = self.check_executecodemodule(execute_code_func, pathname, NULL) + self.assertEqual(origin, os.path.abspath(os.fsdecode(pathname))) + origin = self.check_executecodemodule(execute_code_func, NULL, pathname) + if not object: + self.assertEqual(origin, os.path.abspath(os.fsdecode(pathname))) + + pathname = os_helper.TESTFN_UNDECODABLE + if pathname: + if object: + pathname = os.fsdecode(pathname) + origin = self.check_executecodemodule(execute_code_func, pathname, NULL) + self.assertEqual(origin, os.path.abspath(os.fsdecode(pathname))) + self.check_executecodemodule(execute_code_func, NULL, pathname) + + # Test NULL pathname and non-NULL cpathname + pyc_filename = importlib.util.cache_from_source(__file__) + py_filename = importlib.util.source_from_cache(pyc_filename) + origin = self.check_executecodemodule(execute_code_func, NULL, pyc_filename) + if not object: + self.assertEqual(origin, py_filename) + + def test_executecodemodulewithpathnames(self): + # Test PyImport_ExecCodeModuleWithPathnames() + execute_code_func = _testlimitedcapi.PyImport_ExecCodeModuleWithPathnames + self.check_executecode_pathnames(execute_code_func) + + code = compile('attr = 1', '', 'exec') + self.assertRaises(UnicodeDecodeError, execute_code_func, b'\xff', code, NULL, NULL) + # CRASHES execute_code_func(NULL, code, NULL, NULL) + # CRASHES execute_code_func(name, NULL, NULL, NULL) + + def test_executecodemoduleobject(self): + # Test PyImport_ExecCodeModuleObject() + execute_code_func = _testlimitedcapi.PyImport_ExecCodeModuleObject + self.check_executecode_pathnames(execute_code_func, object=True) + + code = compile('attr = 1', '', 'exec') + self.assertRaises(TypeError, execute_code_func, [], code, NULL, NULL) + nonstring = tuple(['hashable non-string']) + self.assertRaises(AttributeError, execute_code_func, nonstring, code, NULL, NULL) + sys.modules.pop(nonstring, None) + # CRASHES 
execute_code_func(NULL, code, NULL, NULL) + # CRASHES execute_code_func(name, NULL, NULL, NULL) + + def check_importmoduleattr(self, importmoduleattr): + self.assertIs(importmoduleattr('sys', 'argv'), sys.argv) + self.assertIs(importmoduleattr('types', 'ModuleType'), types.ModuleType) + + # module name containing a dot + attr = importmoduleattr('email.message', 'Message') + from email.message import Message + self.assertIs(attr, Message) + + with self.assertRaises(ImportError): + # nonexistent module + importmoduleattr('nonexistentmodule', 'attr') + with self.assertRaises(AttributeError): + # nonexistent attribute + importmoduleattr('sys', 'nonexistentattr') + with self.assertRaises(AttributeError): + # attribute name containing a dot + importmoduleattr('sys', 'implementation.name') + + def test_importmoduleattr(self): + # Test PyImport_ImportModuleAttr() + importmoduleattr = _testcapi.PyImport_ImportModuleAttr + self.check_importmoduleattr(importmoduleattr) + + # Invalid module name type + for mod_name in (object(), 123, b'bytes'): + with self.subTest(mod_name=mod_name): + with self.assertRaises(TypeError): + importmoduleattr(mod_name, "attr") + + # Invalid attribute name type + for attr_name in (object(), 123, b'bytes'): + with self.subTest(attr_name=attr_name): + with self.assertRaises(TypeError): + importmoduleattr("sys", attr_name) + + with self.assertRaises(SystemError): + importmoduleattr(NULL, "argv") + # CRASHES importmoduleattr("sys", NULL) + + def test_importmoduleattrstring(self): + # Test PyImport_ImportModuleAttrString() + importmoduleattr = _testcapi.PyImport_ImportModuleAttrString + self.check_importmoduleattr(importmoduleattr) + + with self.assertRaises(UnicodeDecodeError): + importmoduleattr(b"sys\xff", "argv") + with self.assertRaises(UnicodeDecodeError): + importmoduleattr("sys", b"argv\xff") + + # CRASHES importmoduleattr(NULL, "argv") + # CRASHES importmoduleattr("sys", NULL) + + # TODO: test PyImport_GetImporter() + # TODO: test PyImport_ReloadModule() + # TODO: test PyImport_ExtendInittab() + # PyImport_AppendInittab() is tested by test_embed + + +if __name__ == "__main__": + unittest.main() diff --git a/Lib/test/test_capi/test_misc.py b/Lib/test/test_capi/test_misc.py index ada30181aeeca9..b218f72f1bbce0 100644 --- a/Lib/test/test_capi/test_misc.py +++ b/Lib/test/test_capi/test_misc.py @@ -75,6 +75,11 @@ class InstanceMethod: id = _testcapi.instancemethod(id) testfunction = _testcapi.instancemethod(testfunction) + +CURRENT_THREAD_REGEX = r'Current thread.*:\n' if not support.Py_GIL_DISABLED else r'Stack .*:\n' + + +@support.force_not_colorized_test_class class CAPITest(unittest.TestCase): def test_instancemethod(self): @@ -114,8 +119,7 @@ def test_no_FatalError_infinite_loop(self): "after Python initialization and before Python finalization, " "but it was called without an active thread state. 
" "Are you trying to call the C API inside of a Py_BEGIN_ALLOW_THREADS block?").encode() - self.assertTrue(err.rstrip().startswith(msg), - err) + self.assertStartsWith(err.rstrip(), msg) def test_memoryview_from_NULL_pointer(self): self.assertRaises(ValueError, _testcapi.make_memoryview_from_NULL_pointer) @@ -234,8 +238,8 @@ def test_return_null_without_error(self): r'Python runtime state: initialized\n' r'SystemError: ' r'returned NULL without setting an exception\n' - r'\n' - r'Current thread.*:\n' + r'\n' + + CURRENT_THREAD_REGEX + r' File .*", line 6 in \n') else: with self.assertRaises(SystemError) as cm: @@ -268,8 +272,8 @@ def test_return_result_with_error(self): r'SystemError: ' r'returned a result with an exception set\n' - r'\n' - r'Current thread.*:\n' + r'\n' + + CURRENT_THREAD_REGEX + r' File .*, line 6 in \n') else: with self.assertRaises(SystemError) as cm: @@ -298,11 +302,11 @@ def test_getitem_with_error(self): r'with an exception set\n' r'Python runtime state: initialized\n' r'ValueError: bug\n' - r'\n' - r'Current thread .* \(most recent call first\):\n' + r'\n' + + CURRENT_THREAD_REGEX + r' File .*, line 6 in \n' r'\n' - r'Extension modules: _testcapi \(total: 1\)\n') + r'Extension modules: _testcapi, _testinternalcapi \(total: 2\)\n') else: # Python built with NDEBUG macro defined: # test _Py_CheckFunctionResult() instead. @@ -399,42 +403,6 @@ def test_buildvalue_ints(self): def test_buildvalue_N(self): _testcapi.test_buildvalue_N() - def check_negative_refcount(self, code): - # bpo-35059: Check that Py_DECREF() reports the correct filename - # when calling _Py_NegativeRefcount() to abort Python. - code = textwrap.dedent(code) - rc, out, err = assert_python_failure('-c', code) - self.assertRegex(err, - br'_testcapimodule\.c:[0-9]+: ' - br'_Py_NegativeRefcount: Assertion failed: ' - br'object has negative ref count') - - @unittest.skipUnless(hasattr(_testcapi, 'negative_refcount'), - 'need _testcapi.negative_refcount()') - def test_negative_refcount(self): - code = """ - import _testcapi - from test import support - - with support.SuppressCrashReport(): - _testcapi.negative_refcount() - """ - self.check_negative_refcount(code) - - @unittest.skipUnless(hasattr(_testcapi, 'decref_freed_object'), - 'need _testcapi.decref_freed_object()') - @support.skip_if_sanitizer("use after free on purpose", - address=True, memory=True, ub=True) - def test_decref_freed_object(self): - code = """ - import _testcapi - from test import support - - with support.SuppressCrashReport(): - _testcapi.decref_freed_object() - """ - self.check_negative_refcount(code) - def test_trashcan_subclass(self): # bpo-35983: Check that the trashcan mechanism for "list" is NOT # activated when its tp_dealloc is being called by a subclass @@ -718,7 +686,7 @@ def test_heaptype_with_setattro(self): def test_heaptype_with_custom_metaclass(self): metaclass = _testcapi.HeapCTypeMetaclass - self.assertTrue(issubclass(metaclass, type)) + self.assertIsSubclass(metaclass, type) # Class creation from C t = _testcapi.pytype_fromspec_meta(metaclass) @@ -734,7 +702,7 @@ def test_heaptype_with_custom_metaclass(self): def test_heaptype_with_custom_metaclass_null_new(self): metaclass = _testcapi.HeapCTypeMetaclassNullNew - self.assertTrue(issubclass(metaclass, type)) + self.assertIsSubclass(metaclass, type) # Class creation from C t = _testcapi.pytype_fromspec_meta(metaclass) @@ -749,7 +717,7 @@ def test_heaptype_with_custom_metaclass_null_new(self): def test_heaptype_with_custom_metaclass_custom_new(self): metaclass = 
_testcapi.HeapCTypeMetaclassCustomNew - self.assertTrue(issubclass(_testcapi.HeapCTypeMetaclassCustomNew, type)) + self.assertIsSubclass(_testcapi.HeapCTypeMetaclassCustomNew, type) msg = "Metaclasses with custom tp_new are not supported." with self.assertRaisesRegex(TypeError, msg): @@ -908,8 +876,7 @@ def test_export_symbols(self): names.append('Py_FrozenMain') for name in names: - with self.subTest(name=name): - self.assertTrue(hasattr(ctypes.pythonapi, name)) + self.assertHasAttr(ctypes.pythonapi, name) def test_clear_managed_dict(self): @@ -925,175 +892,6 @@ def __init__(self): _testcapi.clear_managed_dict(c) self.assertEqual(c.__dict__, {}) - def test_function_get_code(self): - import types - - def some(): - pass - - code = _testcapi.function_get_code(some) - self.assertIsInstance(code, types.CodeType) - self.assertEqual(code, some.__code__) - - with self.assertRaises(SystemError): - _testcapi.function_get_code(None) # not a function - - def test_function_get_globals(self): - def some(): - pass - - globals_ = _testcapi.function_get_globals(some) - self.assertIsInstance(globals_, dict) - self.assertEqual(globals_, some.__globals__) - - with self.assertRaises(SystemError): - _testcapi.function_get_globals(None) # not a function - - def test_function_get_module(self): - def some(): - pass - - module = _testcapi.function_get_module(some) - self.assertIsInstance(module, str) - self.assertEqual(module, some.__module__) - - with self.assertRaises(SystemError): - _testcapi.function_get_module(None) # not a function - - def test_function_get_defaults(self): - def some( - pos_only1, pos_only2='p', - /, - zero=0, optional=None, - *, - kw1, - kw2=True, - ): - pass - - defaults = _testcapi.function_get_defaults(some) - self.assertEqual(defaults, ('p', 0, None)) - self.assertEqual(defaults, some.__defaults__) - - with self.assertRaises(SystemError): - _testcapi.function_get_defaults(None) # not a function - - def test_function_set_defaults(self): - def some( - pos_only1, pos_only2='p', - /, - zero=0, optional=None, - *, - kw1, - kw2=True, - ): - pass - - old_defaults = ('p', 0, None) - self.assertEqual(_testcapi.function_get_defaults(some), old_defaults) - self.assertEqual(some.__defaults__, old_defaults) - - with self.assertRaises(SystemError): - _testcapi.function_set_defaults(some, 1) # not tuple or None - self.assertEqual(_testcapi.function_get_defaults(some), old_defaults) - self.assertEqual(some.__defaults__, old_defaults) - - with self.assertRaises(SystemError): - _testcapi.function_set_defaults(1, ()) # not a function - self.assertEqual(_testcapi.function_get_defaults(some), old_defaults) - self.assertEqual(some.__defaults__, old_defaults) - - new_defaults = ('q', 1, None) - _testcapi.function_set_defaults(some, new_defaults) - self.assertEqual(_testcapi.function_get_defaults(some), new_defaults) - self.assertEqual(some.__defaults__, new_defaults) - - # Empty tuple is fine: - new_defaults = () - _testcapi.function_set_defaults(some, new_defaults) - self.assertEqual(_testcapi.function_get_defaults(some), new_defaults) - self.assertEqual(some.__defaults__, new_defaults) - - class tuplesub(tuple): ... 
# tuple subclasses must work - - new_defaults = tuplesub(((1, 2), ['a', 'b'], None)) - _testcapi.function_set_defaults(some, new_defaults) - self.assertEqual(_testcapi.function_get_defaults(some), new_defaults) - self.assertEqual(some.__defaults__, new_defaults) - - # `None` is special, it sets `defaults` to `NULL`, - # it needs special handling in `_testcapi`: - _testcapi.function_set_defaults(some, None) - self.assertEqual(_testcapi.function_get_defaults(some), None) - self.assertEqual(some.__defaults__, None) - - def test_function_get_kw_defaults(self): - def some( - pos_only1, pos_only2='p', - /, - zero=0, optional=None, - *, - kw1, - kw2=True, - ): - pass - - defaults = _testcapi.function_get_kw_defaults(some) - self.assertEqual(defaults, {'kw2': True}) - self.assertEqual(defaults, some.__kwdefaults__) - - with self.assertRaises(SystemError): - _testcapi.function_get_kw_defaults(None) # not a function - - def test_function_set_kw_defaults(self): - def some( - pos_only1, pos_only2='p', - /, - zero=0, optional=None, - *, - kw1, - kw2=True, - ): - pass - - old_defaults = {'kw2': True} - self.assertEqual(_testcapi.function_get_kw_defaults(some), old_defaults) - self.assertEqual(some.__kwdefaults__, old_defaults) - - with self.assertRaises(SystemError): - _testcapi.function_set_kw_defaults(some, 1) # not dict or None - self.assertEqual(_testcapi.function_get_kw_defaults(some), old_defaults) - self.assertEqual(some.__kwdefaults__, old_defaults) - - with self.assertRaises(SystemError): - _testcapi.function_set_kw_defaults(1, {}) # not a function - self.assertEqual(_testcapi.function_get_kw_defaults(some), old_defaults) - self.assertEqual(some.__kwdefaults__, old_defaults) - - new_defaults = {'kw2': (1, 2, 3)} - _testcapi.function_set_kw_defaults(some, new_defaults) - self.assertEqual(_testcapi.function_get_kw_defaults(some), new_defaults) - self.assertEqual(some.__kwdefaults__, new_defaults) - - # Empty dict is fine: - new_defaults = {} - _testcapi.function_set_kw_defaults(some, new_defaults) - self.assertEqual(_testcapi.function_get_kw_defaults(some), new_defaults) - self.assertEqual(some.__kwdefaults__, new_defaults) - - class dictsub(dict): ... 
# dict subclasses must work - - new_defaults = dictsub({'kw2': None}) - _testcapi.function_set_kw_defaults(some, new_defaults) - self.assertEqual(_testcapi.function_get_kw_defaults(some), new_defaults) - self.assertEqual(some.__kwdefaults__, new_defaults) - - # `None` is special, it sets `kwdefaults` to `NULL`, - # it needs special handling in `_testcapi`: - _testcapi.function_set_kw_defaults(some, None) - self.assertEqual(_testcapi.function_get_kw_defaults(some), None) - self.assertEqual(some.__kwdefaults__, None) - def test_unstable_gc_new_with_extra_data(self): class Data(_testcapi.ObjExtraData): __slots__ = ('x', 'y') @@ -1108,147 +906,6 @@ class Data(_testcapi.ObjExtraData): del d.extra self.assertIsNone(d.extra) - def test_get_type_name(self): - class MyType: - pass - - from _testcapi import ( - get_type_name, get_type_qualname, - get_type_fullyqualname, get_type_module_name) - - from collections import OrderedDict - ht = _testcapi.get_heaptype_for_name() - for cls, fullname, modname, qualname, name in ( - (int, - 'int', - 'builtins', - 'int', - 'int'), - (OrderedDict, - 'collections.OrderedDict', - 'collections', - 'OrderedDict', - 'OrderedDict'), - (ht, - '_testcapi.HeapTypeNameType', - '_testcapi', - 'HeapTypeNameType', - 'HeapTypeNameType'), - (MyType, - f'{__name__}.CAPITest.test_get_type_name..MyType', - __name__, - 'CAPITest.test_get_type_name..MyType', - 'MyType'), - ): - with self.subTest(cls=repr(cls)): - self.assertEqual(get_type_fullyqualname(cls), fullname) - self.assertEqual(get_type_module_name(cls), modname) - self.assertEqual(get_type_qualname(cls), qualname) - self.assertEqual(get_type_name(cls), name) - - # override __module__ - ht.__module__ = 'test_module' - self.assertEqual(get_type_fullyqualname(ht), 'test_module.HeapTypeNameType') - self.assertEqual(get_type_module_name(ht), 'test_module') - self.assertEqual(get_type_qualname(ht), 'HeapTypeNameType') - self.assertEqual(get_type_name(ht), 'HeapTypeNameType') - - # override __name__ and __qualname__ - MyType.__name__ = 'my_name' - MyType.__qualname__ = 'my_qualname' - self.assertEqual(get_type_fullyqualname(MyType), f'{__name__}.my_qualname') - self.assertEqual(get_type_module_name(MyType), __name__) - self.assertEqual(get_type_qualname(MyType), 'my_qualname') - self.assertEqual(get_type_name(MyType), 'my_name') - - # override also __module__ - MyType.__module__ = 'my_module' - self.assertEqual(get_type_fullyqualname(MyType), 'my_module.my_qualname') - self.assertEqual(get_type_module_name(MyType), 'my_module') - self.assertEqual(get_type_qualname(MyType), 'my_qualname') - self.assertEqual(get_type_name(MyType), 'my_name') - - # PyType_GetFullyQualifiedName() ignores the module if it's "builtins" - # or "__main__" of it is not a string - MyType.__module__ = 'builtins' - self.assertEqual(get_type_fullyqualname(MyType), 'my_qualname') - MyType.__module__ = '__main__' - self.assertEqual(get_type_fullyqualname(MyType), 'my_qualname') - MyType.__module__ = 123 - self.assertEqual(get_type_fullyqualname(MyType), 'my_qualname') - - def test_get_base_by_token(self): - def get_base_by_token(src, key, comparable=True): - def run(use_mro): - find_first = _testcapi.pytype_getbasebytoken - ret1, result = find_first(src, key, use_mro, True) - ret2, no_result = find_first(src, key, use_mro, False) - self.assertIn(ret1, (0, 1)) - self.assertEqual(ret1, result is not None) - self.assertEqual(ret1, ret2) - self.assertIsNone(no_result) - return result - - found_in_mro = run(True) - found_in_bases = run(False) - if comparable: - 
self.assertIs(found_in_mro, found_in_bases) - return found_in_mro - return found_in_mro, found_in_bases - - create_type = _testcapi.create_type_with_token - get_token = _testcapi.get_tp_token - - Py_TP_USE_SPEC = _testcapi.Py_TP_USE_SPEC - self.assertEqual(Py_TP_USE_SPEC, 0) - - A1 = create_type('_testcapi.A1', Py_TP_USE_SPEC) - self.assertTrue(get_token(A1) != Py_TP_USE_SPEC) - - B1 = create_type('_testcapi.B1', id(self)) - self.assertTrue(get_token(B1) == id(self)) - - tokenA1 = get_token(A1) - # find A1 from A1 - found = get_base_by_token(A1, tokenA1) - self.assertIs(found, A1) - - # no token in static types - STATIC = type(1) - self.assertEqual(get_token(STATIC), 0) - found = get_base_by_token(STATIC, tokenA1) - self.assertIs(found, None) - - # no token in pure subtypes - class A2(A1): pass - self.assertEqual(get_token(A2), 0) - # find A1 - class Z(STATIC, B1, A2): pass - found = get_base_by_token(Z, tokenA1) - self.assertIs(found, A1) - - # searching for NULL token is an error - with self.assertRaises(SystemError): - get_base_by_token(Z, 0) - with self.assertRaises(SystemError): - get_base_by_token(STATIC, 0) - - # share the token with A1 - C1 = create_type('_testcapi.C1', tokenA1) - self.assertTrue(get_token(C1) == tokenA1) - - # find C1 first by shared token - class Z(C1, A2): pass - found = get_base_by_token(Z, tokenA1) - self.assertIs(found, C1) - # B1 not found - found = get_base_by_token(Z, get_token(B1)) - self.assertIs(found, None) - - with self.assertRaises(TypeError): - _testcapi.pytype_getbasebytoken( - 'not a type', id(self), True, False) - def test_gen_get_code(self): def genf(): yield gen = genf() @@ -1457,125 +1114,6 @@ def test_pyobject_getitemdata_error(self): _testcapi.pyobject_getitemdata(0) - def test_function_get_closure(self): - from types import CellType - - def regular_function(): ... - def unused_one_level(arg1): - def inner(arg2, arg3): ... - return inner - def unused_two_levels(arg1, arg2): - def decorator(arg3, arg4): - def inner(arg5, arg6): ... 
- return inner - return decorator - def with_one_level(arg1): - def inner(arg2, arg3): - return arg1 + arg2 + arg3 - return inner - def with_two_levels(arg1, arg2): - def decorator(arg3, arg4): - def inner(arg5, arg6): - return arg1 + arg2 + arg3 + arg4 + arg5 + arg6 - return inner - return decorator - - # Functions without closures: - self.assertIsNone(_testcapi.function_get_closure(regular_function)) - self.assertIsNone(regular_function.__closure__) - - func = unused_one_level(1) - closure = _testcapi.function_get_closure(func) - self.assertIsNone(closure) - self.assertIsNone(func.__closure__) - - func = unused_two_levels(1, 2)(3, 4) - closure = _testcapi.function_get_closure(func) - self.assertIsNone(closure) - self.assertIsNone(func.__closure__) - - # Functions with closures: - func = with_one_level(5) - closure = _testcapi.function_get_closure(func) - self.assertEqual(closure, func.__closure__) - self.assertIsInstance(closure, tuple) - self.assertEqual(len(closure), 1) - self.assertEqual(len(closure), len(func.__code__.co_freevars)) - self.assertTrue(all(isinstance(cell, CellType) for cell in closure)) - self.assertTrue(closure[0].cell_contents, 5) - - func = with_two_levels(1, 2)(3, 4) - closure = _testcapi.function_get_closure(func) - self.assertEqual(closure, func.__closure__) - self.assertIsInstance(closure, tuple) - self.assertEqual(len(closure), 4) - self.assertEqual(len(closure), len(func.__code__.co_freevars)) - self.assertTrue(all(isinstance(cell, CellType) for cell in closure)) - self.assertEqual([cell.cell_contents for cell in closure], - [1, 2, 3, 4]) - - def test_function_get_closure_error(self): - with self.assertRaises(SystemError): - _testcapi.function_get_closure(1) - with self.assertRaises(SystemError): - _testcapi.function_get_closure(None) - - def test_function_set_closure(self): - from types import CellType - - def function_without_closure(): ... - def function_with_closure(arg): - def inner(): - return arg - return inner - - func = function_without_closure - _testcapi.function_set_closure(func, (CellType(1), CellType(1))) - closure = _testcapi.function_get_closure(func) - self.assertEqual([c.cell_contents for c in closure], [1, 1]) - self.assertEqual([c.cell_contents for c in func.__closure__], [1, 1]) - - func = function_with_closure(1) - _testcapi.function_set_closure(func, - (CellType(1), CellType(2), CellType(3))) - closure = _testcapi.function_get_closure(func) - self.assertEqual([c.cell_contents for c in closure], [1, 2, 3]) - self.assertEqual([c.cell_contents for c in func.__closure__], [1, 2, 3]) - - def test_function_set_closure_none(self): - def function_without_closure(): ... - def function_with_closure(arg): - def inner(): - return arg - return inner - - _testcapi.function_set_closure(function_without_closure, None) - self.assertIsNone( - _testcapi.function_get_closure(function_without_closure)) - self.assertIsNone(function_without_closure.__closure__) - - _testcapi.function_set_closure(function_with_closure, None) - self.assertIsNone( - _testcapi.function_get_closure(function_with_closure)) - self.assertIsNone(function_with_closure.__closure__) - - def test_function_set_closure_errors(self): - def function_without_closure(): ... 
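# Editor's note (illustrative sketch, not part of the patch): the closure tests
# above drive PyFunction_GetClosure()/PyFunction_SetClosure() through _testcapi;
# the same state is visible from pure Python as __closure__, which is either
# None or a tuple of cell objects:
from types import CellType

def outer(x):
    def inner():
        return x
    return inner

f = outer(42)
assert isinstance(f.__closure__, tuple)
assert isinstance(f.__closure__[0], CellType)
assert f.__closure__[0].cell_contents == 42

def no_free_vars():
    pass

assert no_free_vars.__closure__ is None   # corresponds to a NULL closure at the C level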
- - with self.assertRaises(SystemError): - _testcapi.function_set_closure(None, ()) # not a function - - with self.assertRaises(SystemError): - _testcapi.function_set_closure(function_without_closure, 1) - self.assertIsNone(function_without_closure.__closure__) # no change - - # NOTE: this works, but goes against the docs: - _testcapi.function_set_closure(function_without_closure, (1, 2)) - self.assertEqual( - _testcapi.function_get_closure(function_without_closure), (1, 2)) - self.assertEqual(function_without_closure.__closure__, (1, 2)) - - class TestPendingCalls(unittest.TestCase): # See the comment in ceval.c (at the "handle_eval_breaker" label) @@ -2139,28 +1677,27 @@ async def foo(arg): return await arg # Py 3.5 self.assertEqual(ret, 0) self.assertEqual(pickle.load(f), {'a': '123x', 'b': '123'}) + # _testcapi cannot be imported in a subinterpreter on a Free Threaded build + @support.requires_gil_enabled() def test_py_config_isoloated_per_interpreter(self): # A config change in one interpreter must not leak to out to others. # # This test could verify ANY config value, it just happens to have been # written around the time of int_max_str_digits. Refactoring is okay. code = """if 1: - import sys, _testinternalcapi + import sys, _testcapi # Any config value would do, this happens to be the one being # double checked at the time this test was written. - config = _testinternalcapi.get_config() - config['int_max_str_digits'] = 55555 - config['parse_argv'] = 0 - _testinternalcapi.set_config(config) - sub_value = _testinternalcapi.get_config()['int_max_str_digits'] + _testcapi.config_set('int_max_str_digits', 55555) + sub_value = _testcapi.config_get('int_max_str_digits') assert sub_value == 55555, sub_value """ - before_config = _testinternalcapi.get_config() - assert before_config['int_max_str_digits'] != 55555 + before_config = _testcapi.config_get('int_max_str_digits') + assert before_config != 55555 self.assertEqual(support.run_in_subinterp(code), 0, 'subinterp code failure, check stderr.') - after_config = _testinternalcapi.get_config() + after_config = _testcapi.config_get('int_max_str_digits') self.assertIsNot( before_config, after_config, "Expected get_config() to return a new dict on each call") @@ -2363,7 +1900,7 @@ def test_mutate_exception(self): support.run_in_subinterp("import binascii; binascii.Error.foobar = 'foobar'") - self.assertFalse(hasattr(binascii.Error, "foobar")) + self.assertNotHasAttr(binascii.Error, "foobar") @unittest.skipIf(_testmultiphase is None, "test requires _testmultiphase module") # gh-117649: The free-threaded build does not currently support sharing @@ -2918,39 +2455,6 @@ def test_linked_lifecycle_link_incref_unlink_decref(self): 0, get_refcount(interpid)) -class BuiltinStaticTypesTests(unittest.TestCase): - - TYPES = [ - object, - type, - int, - str, - dict, - type(None), - bool, - BaseException, - Exception, - Warning, - DeprecationWarning, # Warning subclass - ] - - def test_tp_bases_is_set(self): - # PyTypeObject.tp_bases is documented as public API. - # See https://github.com/python/cpython/issues/105020. - for typeobj in self.TYPES: - with self.subTest(typeobj): - bases = _testcapi.type_get_tp_bases(typeobj) - self.assertIsNot(bases, None) - - def test_tp_mro_is_set(self): - # PyTypeObject.tp_bases is documented as public API. - # See https://github.com/python/cpython/issues/105020. 
- for typeobj in self.TYPES: - with self.subTest(typeobj): - mro = _testcapi.type_get_tp_mro(typeobj) - self.assertIsNot(mro, None) - - class TestStaticTypes(unittest.TestCase): _has_run = False @@ -3335,6 +2839,50 @@ def run(self): self.assertEqual(len(set(py_thread_ids)), len(py_thread_ids), py_thread_ids) +class TestVersions(unittest.TestCase): + full_cases = ( + (3, 4, 1, 0xA, 2, 0x030401a2), + (3, 10, 0, 0xF, 0, 0x030a00f0), + (0x103, 0x10B, 0xFF00, -1, 0xF0, 0x030b00f0), # test masking + ) + xy_cases = ( + (3, 4, 0x03040000), + (3, 10, 0x030a0000), + (0x103, 0x10B, 0x030b0000), # test masking + ) + + def test_pack_full_version(self): + for *args, expected in self.full_cases: + with self.subTest(hexversion=hex(expected)): + result = _testlimitedcapi.pack_full_version(*args) + self.assertEqual(result, expected) + + def test_pack_version(self): + for *args, expected in self.xy_cases: + with self.subTest(hexversion=hex(expected)): + result = _testlimitedcapi.pack_version(*args) + self.assertEqual(result, expected) + + def test_pack_full_version_ctypes(self): + ctypes = import_helper.import_module('ctypes') + ctypes_func = ctypes.pythonapi.Py_PACK_FULL_VERSION + ctypes_func.restype = ctypes.c_uint32 + ctypes_func.argtypes = [ctypes.c_int] * 5 + for *args, expected in self.full_cases: + with self.subTest(hexversion=hex(expected)): + result = ctypes_func(*args) + self.assertEqual(result, expected) + + def test_pack_version_ctypes(self): + ctypes = import_helper.import_module('ctypes') + ctypes_func = ctypes.pythonapi.Py_PACK_VERSION + ctypes_func.restype = ctypes.c_uint32 + ctypes_func.argtypes = [ctypes.c_int] * 2 + for *args, expected in self.xy_cases: + with self.subTest(hexversion=hex(expected)): + result = ctypes_func(*args) + self.assertEqual(result, expected) + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_capi/test_object.py b/Lib/test/test_capi/test_object.py index b0d39937fd865f..5d0a383de64520 100644 --- a/Lib/test/test_capi/test_object.py +++ b/Lib/test/test_capi/test_object.py @@ -1,9 +1,12 @@ import enum +import textwrap import unittest from test import support from test.support import import_helper from test.support import os_helper from test.support import threading_helper +from test.support.script_helper import assert_python_failure + _testlimitedcapi = import_helper.import_module('_testlimitedcapi') _testcapi = import_helper.import_module('_testcapi') @@ -170,5 +173,42 @@ def silly_func(obj): self.assertTrue(_testinternalcapi.has_deferred_refcount(silly_list)) +class CAPITest(unittest.TestCase): + def check_negative_refcount(self, code): + # bpo-35059: Check that Py_DECREF() reports the correct filename + # when calling _Py_NegativeRefcount() to abort Python. 
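# Editor's note (illustrative sketch, not part of the patch): the helper below has
# to run the offending snippet in a fresh interpreter, because a negative refcount
# aborts the process; assert_python_failure() captures the child's stderr so the
# abort message can then be matched with assertRegex(). A minimal, self-contained
# version of that run-and-inspect pattern:
import textwrap
from test.support.script_helper import assert_python_failure

snippet = textwrap.dedent("""
    import sys
    sys.exit("boom")          # any failing child process works for the demo
""")
rc, out, err = assert_python_failure('-c', snippet)
assert rc != 0
assert b"boom" in err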
+ code = textwrap.dedent(code) + rc, out, err = assert_python_failure('-c', code) + self.assertRegex(err, + br'object\.c:[0-9]+: ' + br'_Py_NegativeRefcount: Assertion failed: ' + br'object has negative ref count') + + @unittest.skipUnless(hasattr(_testcapi, 'negative_refcount'), + 'need _testcapi.negative_refcount()') + def test_negative_refcount(self): + code = """ + import _testcapi + from test import support + + with support.SuppressCrashReport(): + _testcapi.negative_refcount() + """ + self.check_negative_refcount(code) + + @unittest.skipUnless(hasattr(_testcapi, 'decref_freed_object'), + 'need _testcapi.decref_freed_object()') + @support.skip_if_sanitizer("use after free on purpose", + address=True, memory=True, ub=True) + def test_decref_freed_object(self): + code = """ + import _testcapi + from test import support + + with support.SuppressCrashReport(): + _testcapi.decref_freed_object() + """ + self.check_negative_refcount(code) + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_capi/test_opt.py b/Lib/test/test_capi/test_opt.py index 4cf9b66170c055..02e534caec1162 100644 --- a/Lib/test/test_capi/test_opt.py +++ b/Lib/test/test_capi/test_opt.py @@ -1,4 +1,5 @@ import contextlib +import itertools import sys import textwrap import unittest @@ -8,114 +9,22 @@ import _opcode from test.support import (script_helper, requires_specialization, - import_helper, Py_GIL_DISABLED) + import_helper, Py_GIL_DISABLED, requires_jit_enabled, + reset_code) _testinternalcapi = import_helper.import_module("_testinternalcapi") from _testinternalcapi import TIER2_THRESHOLD -@contextlib.contextmanager -def temporary_optimizer(opt): - old_opt = _testinternalcapi.get_optimizer() - _testinternalcapi.set_optimizer(opt) - try: - yield - finally: - _testinternalcapi.set_optimizer(old_opt) - @contextlib.contextmanager def clear_executors(func): # Clear executors in func before and after running a block - func.__code__ = func.__code__.replace() + reset_code(func) try: yield finally: - func.__code__ = func.__code__.replace() - - -@requires_specialization -@unittest.skipIf(Py_GIL_DISABLED, "optimizer not yet supported in free-threaded builds") -@unittest.skipUnless(hasattr(_testinternalcapi, "get_optimizer"), - "Requires optimizer infrastructure") -class TestOptimizerAPI(unittest.TestCase): - - def test_new_counter_optimizer_dealloc(self): - # See gh-108727 - def f(): - _testinternalcapi.new_counter_optimizer() - - f() - - def test_get_set_optimizer(self): - old = _testinternalcapi.get_optimizer() - opt = _testinternalcapi.new_counter_optimizer() - try: - _testinternalcapi.set_optimizer(opt) - self.assertEqual(_testinternalcapi.get_optimizer(), opt) - _testinternalcapi.set_optimizer(None) - self.assertEqual(_testinternalcapi.get_optimizer(), None) - finally: - _testinternalcapi.set_optimizer(old) - - - def test_counter_optimizer(self): - # Generate a new function at each call - ns = {} - exec(textwrap.dedent(f""" - def loop(): - for _ in range({TIER2_THRESHOLD + 1000}): - pass - """), ns, ns) - loop = ns['loop'] - - for repeat in range(5): - opt = _testinternalcapi.new_counter_optimizer() - with temporary_optimizer(opt): - self.assertEqual(opt.get_count(), 0) - with clear_executors(loop): - loop() - self.assertEqual(opt.get_count(), 1001) - - def test_long_loop(self): - "Check that we aren't confused by EXTENDED_ARG" - - # Generate a new function at each call - ns = {} - exec(textwrap.dedent(f""" - def nop(): - pass - - def long_loop(): - for _ in range({TIER2_THRESHOLD + 20}): - nop(); nop(); 
nop(); nop(); nop(); nop(); nop(); nop(); - nop(); nop(); nop(); nop(); nop(); nop(); nop(); nop(); - nop(); nop(); nop(); nop(); nop(); nop(); nop(); nop(); - nop(); nop(); nop(); nop(); nop(); nop(); nop(); nop(); - nop(); nop(); nop(); nop(); nop(); nop(); nop(); nop(); - nop(); nop(); nop(); nop(); nop(); nop(); nop(); nop(); - nop(); nop(); nop(); nop(); nop(); nop(); nop(); nop(); - """), ns, ns) - long_loop = ns['long_loop'] - - opt = _testinternalcapi.new_counter_optimizer() - with temporary_optimizer(opt): - self.assertEqual(opt.get_count(), 0) - long_loop() - self.assertEqual(opt.get_count(), 21) # Need iterations to warm up - - def test_code_restore_for_ENTER_EXECUTOR(self): - def testfunc(x): - i = 0 - while i < x: - i += 1 - - opt = _testinternalcapi.new_counter_optimizer() - with temporary_optimizer(opt): - testfunc(1000) - code, replace_code = testfunc.__code__, testfunc.__code__.replace() - self.assertEqual(code, replace_code) - self.assertEqual(hash(code), hash(replace_code)) + reset_code(func) def get_first_executor(func): @@ -140,18 +49,9 @@ def get_opnames(ex): @requires_specialization @unittest.skipIf(Py_GIL_DISABLED, "optimizer not yet supported in free-threaded builds") -@unittest.skipUnless(hasattr(_testinternalcapi, "get_optimizer"), - "Requires optimizer infrastructure") +@requires_jit_enabled class TestExecutorInvalidation(unittest.TestCase): - def setUp(self): - self.old = _testinternalcapi.get_optimizer() - self.opt = _testinternalcapi.new_counter_optimizer() - _testinternalcapi.set_optimizer(self.opt) - - def tearDown(self): - _testinternalcapi.set_optimizer(self.old) - def test_invalidate_object(self): # Generate a new set of functions at each call ns = {} @@ -195,9 +95,7 @@ def f(): pass """), ns, ns) f = ns['f'] - opt = _testinternalcapi.new_uop_optimizer() - with temporary_optimizer(opt): - f() + f() exe = get_first_executor(f) self.assertIsNotNone(exe) self.assertTrue(exe.is_valid()) @@ -208,9 +106,7 @@ def test_sys__clear_internal_caches(self): def f(): for _ in range(TIER2_THRESHOLD): pass - opt = _testinternalcapi.new_uop_optimizer() - with temporary_optimizer(opt): - f() + f() exe = get_first_executor(f) self.assertIsNotNone(exe) self.assertTrue(exe.is_valid()) @@ -222,8 +118,7 @@ def f(): @requires_specialization @unittest.skipIf(Py_GIL_DISABLED, "optimizer not yet supported in free-threaded builds") -@unittest.skipUnless(hasattr(_testinternalcapi, "get_optimizer"), - "Requires optimizer infrastructure") +@requires_jit_enabled @unittest.skipIf(os.getenv("PYTHON_UOPS_OPTIMIZE") == "0", "Needs uop optimizer to run.") class TestUops(unittest.TestCase): @@ -233,9 +128,7 @@ def testfunc(x): while i < x: i += 1 - opt = _testinternalcapi.new_uop_optimizer() - with temporary_optimizer(opt): - testfunc(TIER2_THRESHOLD) + testfunc(TIER2_THRESHOLD) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -281,11 +174,9 @@ def many_vars(): """), ns, ns) many_vars = ns["many_vars"] - opt = _testinternalcapi.new_uop_optimizer() - with temporary_optimizer(opt): - ex = get_first_executor(many_vars) - self.assertIsNone(ex) - many_vars() + ex = get_first_executor(many_vars) + self.assertIsNone(ex) + many_vars() ex = get_first_executor(many_vars) self.assertIsNotNone(ex) @@ -304,10 +195,7 @@ def testfunc(x): while i < x: i += 1 - opt = _testinternalcapi.new_uop_optimizer() - - with temporary_optimizer(opt): - testfunc(TIER2_THRESHOLD) + testfunc(TIER2_THRESHOLD) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -320,9 +208,7 @@ def testfunc(n): while i 
< n: i += 1 - opt = _testinternalcapi.new_uop_optimizer() - with temporary_optimizer(opt): - testfunc(TIER2_THRESHOLD) + testfunc(TIER2_THRESHOLD) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -335,9 +221,7 @@ def testfunc(a): if x is None: x = 0 - opt = _testinternalcapi.new_uop_optimizer() - with temporary_optimizer(opt): - testfunc(range(TIER2_THRESHOLD)) + testfunc(range(TIER2_THRESHOLD)) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -352,9 +236,7 @@ def testfunc(a): if x is not None: x = 0 - opt = _testinternalcapi.new_uop_optimizer() - with temporary_optimizer(opt): - testfunc(range(TIER2_THRESHOLD)) + testfunc(range(TIER2_THRESHOLD)) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -368,9 +250,7 @@ def testfunc(n): while not i >= n: i += 1 - opt = _testinternalcapi.new_uop_optimizer() - with temporary_optimizer(opt): - testfunc(TIER2_THRESHOLD) + testfunc(TIER2_THRESHOLD) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -383,9 +263,7 @@ def testfunc(n): while i < n: i += 1 - opt = _testinternalcapi.new_uop_optimizer() - with temporary_optimizer(opt): - testfunc(TIER2_THRESHOLD) + testfunc(TIER2_THRESHOLD) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -403,9 +281,7 @@ def testfunc(n): a += 1 return a - opt = _testinternalcapi.new_uop_optimizer() - with temporary_optimizer(opt): - testfunc(TIER2_THRESHOLD) + testfunc(TIER2_THRESHOLD) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -421,10 +297,8 @@ def testfunc(n): total += i return total - opt = _testinternalcapi.new_uop_optimizer() - with temporary_optimizer(opt): - total = testfunc(TIER2_THRESHOLD) - self.assertEqual(total, sum(range(TIER2_THRESHOLD))) + total = testfunc(TIER2_THRESHOLD) + self.assertEqual(total, sum(range(TIER2_THRESHOLD))) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -442,11 +316,9 @@ def testfunc(a): total += i return total - opt = _testinternalcapi.new_uop_optimizer() - with temporary_optimizer(opt): - a = list(range(TIER2_THRESHOLD)) - total = testfunc(a) - self.assertEqual(total, sum(a)) + a = list(range(TIER2_THRESHOLD)) + total = testfunc(a) + self.assertEqual(total, sum(a)) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -464,11 +336,9 @@ def testfunc(a): total += i return total - opt = _testinternalcapi.new_uop_optimizer() - with temporary_optimizer(opt): - a = tuple(range(TIER2_THRESHOLD)) - total = testfunc(a) - self.assertEqual(total, sum(a)) + a = tuple(range(TIER2_THRESHOLD)) + total = testfunc(a) + self.assertEqual(total, sum(a)) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -484,14 +354,12 @@ def testfunc(it): for x in it: pass - opt = _testinternalcapi.new_uop_optimizer() - with temporary_optimizer(opt): - a = [1, 2, 3] - it = iter(a) - testfunc(it) - a.append(4) - with self.assertRaises(StopIteration): - next(it) + a = [1, 2, 3] + it = iter(a) + testfunc(it) + a.append(4) + with self.assertRaises(StopIteration): + next(it) def test_call_py_exact_args(self): def testfunc(n): @@ -500,9 +368,7 @@ def dummy(x): for i in range(n): dummy(i) - opt = _testinternalcapi.new_uop_optimizer() - with temporary_optimizer(opt): - testfunc(TIER2_THRESHOLD) + testfunc(TIER2_THRESHOLD) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -518,9 +384,7 @@ def testfunc(n): else: i = 1 - opt = _testinternalcapi.new_uop_optimizer() - with temporary_optimizer(opt): - testfunc(TIER2_THRESHOLD) + testfunc(TIER2_THRESHOLD) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) 
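# Editor's note (illustrative sketch, not part of the patch): with the hand-rolled
# counter/uop optimizers gone, these tests now rely on a JIT-enabled build
# (@requires_jit_enabled): calling the function TIER2_THRESHOLD times is what
# attaches an executor, which this file's get_first_executor() helper then finds
# by scanning the code object. Roughly the same idea, self-contained:
import _opcode
from test.support import import_helper

_testinternalcapi = import_helper.import_module("_testinternalcapi")

def first_executor(func):
    # assumes a tier-2/JIT-capable build; mirrors this file's helper
    code = func.__code__
    for offset in range(0, len(code.co_code), 2):
        try:
            return _opcode.get_executor(code, offset)
        except ValueError:
            pass                      # no executor at this offset
    return None

def warmed_loop(n):
    i = 0
    while i < n:
        i += 1

warmed_loop(_testinternalcapi.TIER2_THRESHOLD)   # warm up tier 2
executor = first_executor(warmed_loop)           # stays None unless the JIT kicked in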
@@ -546,9 +410,7 @@ def testfunc(n, m): x += 1000*i + j return x - opt = _testinternalcapi.new_uop_optimizer() - with temporary_optimizer(opt): - x = testfunc(TIER2_THRESHOLD, TIER2_THRESHOLD) + x = testfunc(TIER2_THRESHOLD, TIER2_THRESHOLD) self.assertEqual(x, sum(range(TIER2_THRESHOLD)) * TIER2_THRESHOLD * 1001) @@ -573,9 +435,7 @@ def testfunc(n): bits += 1 return bits - opt = _testinternalcapi.new_uop_optimizer() - with temporary_optimizer(opt): - x = testfunc(TIER2_THRESHOLD * 2) + x = testfunc(TIER2_THRESHOLD * 2) self.assertEqual(x, TIER2_THRESHOLD * 5) ex = get_first_executor(testfunc) @@ -588,16 +448,12 @@ def testfunc(n): @requires_specialization @unittest.skipIf(Py_GIL_DISABLED, "optimizer not yet supported in free-threaded builds") -@unittest.skipUnless(hasattr(_testinternalcapi, "get_optimizer"), - "Requires optimizer infrastructure") +@requires_jit_enabled @unittest.skipIf(os.getenv("PYTHON_UOPS_OPTIMIZE") == "0", "Needs uop optimizer to run.") class TestUopsOptimization(unittest.TestCase): def _run_with_optimizer(self, testfunc, arg): - res = None - opt = _testinternalcapi.new_uop_optimizer() - with temporary_optimizer(opt): - res = testfunc(arg) + res = testfunc(arg) ex = get_first_executor(testfunc) return res, ex @@ -631,10 +487,7 @@ def testfunc(loops): num += 1 return a - opt = _testinternalcapi.new_uop_optimizer() - res = None - with temporary_optimizer(opt): - res = testfunc(TIER2_THRESHOLD) + res = testfunc(TIER2_THRESHOLD) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -655,10 +508,7 @@ def testfunc(loops): num += 1 return x - opt = _testinternalcapi.new_uop_optimizer() - res = None - with temporary_optimizer(opt): - res = testfunc(TIER2_THRESHOLD) + res = testfunc(TIER2_THRESHOLD) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -750,16 +600,14 @@ def testfunc(n): for i in range(n): dummy(i) - opt = _testinternalcapi.new_uop_optimizer() # Trigger specialization testfunc(8) - with temporary_optimizer(opt): - del dummy - gc.collect() + del dummy + gc.collect() - def dummy(x): - return x + 2 - testfunc(32) + def dummy(x): + return x + 2 + testfunc(32) ex = get_first_executor(testfunc) # Honestly as long as it doesn't crash it's fine. 
@@ -792,16 +640,14 @@ def testfunc(n): x = range(i) return x - opt = _testinternalcapi.new_uop_optimizer() - _testinternalcapi.set_optimizer(opt) testfunc(_testinternalcapi.TIER2_THRESHOLD) ex = get_first_executor(testfunc) assert ex is not None uops = get_opnames(ex) assert "_LOAD_GLOBAL_BUILTINS" not in uops - assert "_LOAD_CONST_INLINE_BORROW_WITH_NULL" in uops - """)) + assert "_LOAD_CONST_INLINE_BORROW" in uops + """), PYTHON_JIT="1") self.assertEqual(result[0].rc, 0, result) def test_float_add_constant_propagation(self): @@ -1488,9 +1334,7 @@ def testfunc(n): # Only works on functions promoted to constants global_identity(i) - opt = _testinternalcapi.new_uop_optimizer() - with temporary_optimizer(opt): - testfunc(TIER2_THRESHOLD) + testfunc(TIER2_THRESHOLD) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -1511,6 +1355,87 @@ def test_jit_error_pops(self): with self.assertRaises(TypeError): {item for item in items} + def test_power_type_depends_on_input_values(self): + template = textwrap.dedent(""" + import _testinternalcapi + + L, R, X, Y = {l}, {r}, {x}, {y} + + def check(actual: complex, expected: complex) -> None: + assert actual == expected, (actual, expected) + assert type(actual) is type(expected), (actual, expected) + + def f(l: complex, r: complex) -> None: + expected_local_local = pow(l, r) + pow(l, r) + expected_const_local = pow(L, r) + pow(L, r) + expected_local_const = pow(l, R) + pow(l, R) + expected_const_const = pow(L, R) + pow(L, R) + for _ in range(_testinternalcapi.TIER2_THRESHOLD): + # Narrow types: + l + l, r + r + # The powers produce results, and the addition is unguarded: + check(l ** r + l ** r, expected_local_local) + check(L ** r + L ** r, expected_const_local) + check(l ** R + l ** R, expected_local_const) + check(L ** R + L ** R, expected_const_const) + + # JIT for one pair of values... + f(L, R) + # ...then run with another: + f(X, Y) + """) + interesting = [ + (1, 1), # int ** int -> int + (1, -1), # int ** int -> float + (1.0, 1), # float ** int -> float + (1, 1.0), # int ** float -> float + (-1, 0.5), # int ** float -> complex + (1.0, 1.0), # float ** float -> float + (-1.0, 0.5), # float ** float -> complex + ] + for (l, r), (x, y) in itertools.product(interesting, repeat=2): + s = template.format(l=l, r=r, x=x, y=y) + with self.subTest(l=l, r=r, x=x, y=y): + script_helper.assert_python_ok("-c", s) + + def test_symbols_flow_through_tuples(self): + def testfunc(n): + for _ in range(n): + a = 1 + b = 2 + t = a, b + x, y = t + r = x + y + return r + + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) + self.assertEqual(res, 3) + self.assertIsNotNone(ex) + uops = get_opnames(ex) + self.assertIn("_BINARY_OP_ADD_INT", uops) + self.assertNotIn("_GUARD_BOTH_INT", uops) + self.assertNotIn("_GUARD_NOS_INT", uops) + self.assertNotIn("_GUARD_TOS_INT", uops) + + def test_decref_escapes(self): + class Convert9999ToNone: + def __del__(self): + ns = sys._getframe(1).f_locals + if ns["i"] == _testinternalcapi.TIER2_THRESHOLD: + ns["i"] = None + + def crash_addition(): + try: + for i in range(_testinternalcapi.TIER2_THRESHOLD + 1): + n = Convert9999ToNone() + i + i # Remove guards for i. + n = None # Change i. 
+ i + i # This crashed when we didn't treat DECREF as escaping (gh-124483) + except TypeError: + pass + + crash_addition() + def global_identity(x): return x diff --git a/Lib/test/test_capi/test_sys.py b/Lib/test/test_capi/test_sys.py index 54a8e026d883d4..d3a9b378e7769a 100644 --- a/Lib/test/test_capi/test_sys.py +++ b/Lib/test/test_capi/test_sys.py @@ -51,7 +51,7 @@ def test_sys_setobject(self): self.assertEqual(setobject(b'newattr', value2), 0) self.assertIs(sys.newattr, value2) self.assertEqual(setobject(b'newattr', NULL), 0) - self.assertFalse(hasattr(sys, 'newattr')) + self.assertNotHasAttr(sys, 'newattr') self.assertEqual(setobject(b'newattr', NULL), 0) finally: with contextlib.suppress(AttributeError): @@ -60,7 +60,7 @@ def test_sys_setobject(self): self.assertEqual(setobject('\U0001f40d'.encode(), value), 0) self.assertIs(getattr(sys, '\U0001f40d'), value) self.assertEqual(setobject('\U0001f40d'.encode(), NULL), 0) - self.assertFalse(hasattr(sys, '\U0001f40d')) + self.assertNotHasAttr(sys, '\U0001f40d') finally: with contextlib.suppress(AttributeError): delattr(sys, '\U0001f40d') diff --git a/Lib/test/test_capi/test_type.py b/Lib/test/test_capi/test_type.py index 54c83e09f892a0..7e5d013d737ab0 100644 --- a/Lib/test/test_capi/test_type.py +++ b/Lib/test/test_capi/test_type.py @@ -1,10 +1,184 @@ -from test.support import import_helper +from test.support import import_helper, Py_GIL_DISABLED, refleak_helper import unittest _testcapi = import_helper.import_module('_testcapi') +class BuiltinStaticTypesTests(unittest.TestCase): + + TYPES = [ + object, + type, + int, + str, + dict, + type(None), + bool, + BaseException, + Exception, + Warning, + DeprecationWarning, # Warning subclass + ] + + def test_tp_bases_is_set(self): + # PyTypeObject.tp_bases is documented as public API. + # See https://github.com/python/cpython/issues/105020. + for typeobj in self.TYPES: + with self.subTest(typeobj): + bases = _testcapi.type_get_tp_bases(typeobj) + self.assertIsNot(bases, None) + + def test_tp_mro_is_set(self): + # PyTypeObject.tp_bases is documented as public API. + # See https://github.com/python/cpython/issues/105020. 
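# Editor's note (illustrative sketch, not part of the patch): tp_bases and tp_mro
# are the C-level slots behind __bases__ and __mro__, so for the static types
# listed above the Python-level attributes are expected to be populated as well,
# for example:
assert bool.__bases__ == (int,)
assert type(None).__bases__ == (object,)
assert Exception.__mro__ == (Exception, BaseException, object)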
+ for typeobj in self.TYPES: + with self.subTest(typeobj): + mro = _testcapi.type_get_tp_mro(typeobj) + self.assertIsNot(mro, None) + + class TypeTests(unittest.TestCase): + def test_get_type_name(self): + class MyType: + pass + + from _testcapi import ( + get_type_name, get_type_qualname, + get_type_fullyqualname, get_type_module_name) + + from collections import OrderedDict + ht = _testcapi.get_heaptype_for_name() + for cls, fullname, modname, qualname, name in ( + (int, + 'int', + 'builtins', + 'int', + 'int'), + (OrderedDict, + 'collections.OrderedDict', + 'collections', + 'OrderedDict', + 'OrderedDict'), + (ht, + '_testcapi.HeapTypeNameType', + '_testcapi', + 'HeapTypeNameType', + 'HeapTypeNameType'), + (MyType, + f'{__name__}.TypeTests.test_get_type_name..MyType', + __name__, + 'TypeTests.test_get_type_name..MyType', + 'MyType'), + ): + with self.subTest(cls=repr(cls)): + self.assertEqual(get_type_fullyqualname(cls), fullname) + self.assertEqual(get_type_module_name(cls), modname) + self.assertEqual(get_type_qualname(cls), qualname) + self.assertEqual(get_type_name(cls), name) + + # override __module__ + ht.__module__ = 'test_module' + self.assertEqual(get_type_fullyqualname(ht), 'test_module.HeapTypeNameType') + self.assertEqual(get_type_module_name(ht), 'test_module') + self.assertEqual(get_type_qualname(ht), 'HeapTypeNameType') + self.assertEqual(get_type_name(ht), 'HeapTypeNameType') + + # override __name__ and __qualname__ + MyType.__name__ = 'my_name' + MyType.__qualname__ = 'my_qualname' + self.assertEqual(get_type_fullyqualname(MyType), f'{__name__}.my_qualname') + self.assertEqual(get_type_module_name(MyType), __name__) + self.assertEqual(get_type_qualname(MyType), 'my_qualname') + self.assertEqual(get_type_name(MyType), 'my_name') + + # override also __module__ + MyType.__module__ = 'my_module' + self.assertEqual(get_type_fullyqualname(MyType), 'my_module.my_qualname') + self.assertEqual(get_type_module_name(MyType), 'my_module') + self.assertEqual(get_type_qualname(MyType), 'my_qualname') + self.assertEqual(get_type_name(MyType), 'my_name') + + # PyType_GetFullyQualifiedName() ignores the module if it's "builtins" + # or "__main__" of it is not a string + MyType.__module__ = 'builtins' + self.assertEqual(get_type_fullyqualname(MyType), 'my_qualname') + MyType.__module__ = '__main__' + self.assertEqual(get_type_fullyqualname(MyType), 'my_qualname') + MyType.__module__ = 123 + self.assertEqual(get_type_fullyqualname(MyType), 'my_qualname') + + def test_get_base_by_token(self): + def get_base_by_token(src, key, comparable=True): + def run(use_mro): + find_first = _testcapi.pytype_getbasebytoken + ret1, result = find_first(src, key, use_mro, True) + ret2, no_result = find_first(src, key, use_mro, False) + self.assertIn(ret1, (0, 1)) + self.assertEqual(ret1, result is not None) + self.assertEqual(ret1, ret2) + self.assertIsNone(no_result) + return result + + found_in_mro = run(True) + found_in_bases = run(False) + if comparable: + self.assertIs(found_in_mro, found_in_bases) + return found_in_mro + return found_in_mro, found_in_bases + + create_type = _testcapi.create_type_with_token + get_token = _testcapi.get_tp_token + + Py_TP_USE_SPEC = _testcapi.Py_TP_USE_SPEC + self.assertEqual(Py_TP_USE_SPEC, 0) + + A1 = create_type('_testcapi.A1', Py_TP_USE_SPEC) + self.assertTrue(get_token(A1) != Py_TP_USE_SPEC) + + B1 = create_type('_testcapi.B1', id(self)) + self.assertTrue(get_token(B1) == id(self)) + + tokenA1 = get_token(A1) + # find A1 from A1 + found = get_base_by_token(A1, 
tokenA1) + self.assertIs(found, A1) + + # no token in static types + STATIC = type(1) + self.assertEqual(get_token(STATIC), 0) + found = get_base_by_token(STATIC, tokenA1) + self.assertIs(found, None) + + # no token in pure subtypes + class A2(A1): pass + self.assertEqual(get_token(A2), 0) + # find A1 + class Z(STATIC, B1, A2): pass + found = get_base_by_token(Z, tokenA1) + self.assertIs(found, A1) + + # searching for NULL token is an error + with self.assertRaises(SystemError): + get_base_by_token(Z, 0) + with self.assertRaises(SystemError): + get_base_by_token(STATIC, 0) + + # share the token with A1 + C1 = create_type('_testcapi.C1', tokenA1) + self.assertTrue(get_token(C1) == tokenA1) + + # find C1 first by shared token + class Z(C1, A2): pass + found = get_base_by_token(Z, tokenA1) + self.assertIs(found, C1) + # B1 not found + found = get_base_by_token(Z, get_token(B1)) + self.assertIs(found, None) + + with self.assertRaises(TypeError): + _testcapi.pytype_getbasebytoken( + 'not a type', id(self), True, False) + def test_freeze(self): # test PyType_Freeze() type_freeze = _testcapi.type_freeze @@ -37,6 +211,9 @@ class D(A, C): pass # as well type_freeze(D) + @unittest.skipIf( + Py_GIL_DISABLED and refleak_helper.hunting_for_refleaks(), + "Specialization failure triggers gh-127773") def test_freeze_meta(self): """test PyType_Freeze() with overridden MRO""" type_freeze = _testcapi.type_freeze @@ -64,3 +241,10 @@ class FreezeThis(metaclass=Meta): Base.value = 3 type_freeze(FreezeThis) self.assertEqual(FreezeThis.value, 2) + + def test_manual_heap_type(self): + # gh-128923: test that a manually allocated and initailized heap type + # works correctly + ManualHeapType = _testcapi.ManualHeapType + for i in range(100): + self.assertIsInstance(ManualHeapType(), ManualHeapType) diff --git a/Lib/test/test_cmd_line.py b/Lib/test/test_cmd_line.py index 634efda354407f..b949b310ac0f5f 100644 --- a/Lib/test/test_cmd_line.py +++ b/Lib/test/test_cmd_line.py @@ -336,6 +336,8 @@ def test_osx_android_utf8(self): self.assertEqual(stdout, expected) self.assertEqual(p.returncode, 0) + @unittest.skipIf(os.environ.get("PYTHONUNBUFFERED", "0") != "0", + "Python stdio buffering is disabled.") def test_non_interactive_output_buffering(self): code = textwrap.dedent(""" import sys @@ -489,7 +491,7 @@ def test_stdout_flush_at_shutdown(self): rc, out, err = assert_python_failure('-c', code) self.assertEqual(b'', out) self.assertEqual(120, rc) - self.assertIn(b'Exception ignored on flushing sys.stdout:\n' + self.assertIn(b'Exception ignored while flushing sys.stdout:\n' b'OSError: '.replace(b'\n', os.linesep.encode()), err) @@ -1012,7 +1014,7 @@ def test_parsing_error(self): stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True) - err_msg = "unknown option --unknown-option\nusage: " + err_msg = "Unknown option: --unknown-option\nusage: " self.assertTrue(proc.stderr.startswith(err_msg), proc.stderr) self.assertNotEqual(proc.returncode, 0) diff --git a/Lib/test/test_cmd_line_script.py b/Lib/test/test_cmd_line_script.py index f30107225ff612..e7f3e46c1868f7 100644 --- a/Lib/test/test_cmd_line_script.py +++ b/Lib/test/test_cmd_line_script.py @@ -88,6 +88,8 @@ def _make_test_zip_pkg(zip_dir, zip_basename, pkg_name, script_basename, importlib.invalidate_caches() return to_return + +@support.force_not_colorized_test_class class CmdLineTest(unittest.TestCase): def _check_output(self, script_name, exit_code, data, expected_file, expected_argv0, @@ -659,7 +661,8 @@ def test_syntaxerror_invalid_escape_sequence_multi_line(self): 
stderr.splitlines()[-3:], [ b' foo = """\\q"""', b' ^^^^^^^^', - b'SyntaxError: invalid escape sequence \'\\q\'' + b'SyntaxError: "\\q" is an invalid escape sequence. ' + b'Did you mean "\\\\q"? A raw string is also an option.' ], ) diff --git a/Lib/test/test_code.py b/Lib/test/test_code.py index 2a1b26e8a1ffd1..69c1ee0690d269 100644 --- a/Lib/test/test_code.py +++ b/Lib/test/test_code.py @@ -215,6 +215,8 @@ from test.support import threading_helper, import_helper from test.support.bytecode_helper import instructions_with_positions from opcode import opmap, opname +from _testcapi import code_offset_to_line + COPY_FREE_VARS = opmap['COPY_FREE_VARS'] @@ -427,14 +429,14 @@ def test_invalid_bytecode(self): def foo(): pass - # assert that opcode 229 is invalid - self.assertEqual(opname[229], '<229>') + # assert that opcode 135 is invalid + self.assertEqual(opname[135], '<135>') - # change first opcode to 0xeb (=229) + # change first opcode to 0x87 (=135) foo.__code__ = foo.__code__.replace( - co_code=b'\xe5' + foo.__code__.co_code[1:]) + co_code=b'\x87' + foo.__code__.co_code[1:]) - msg = "unknown opcode 229" + msg = "unknown opcode 135" with self.assertRaisesRegex(SystemError, msg): foo() @@ -896,6 +898,44 @@ async def async_func(): rc, out, err = assert_python_ok('-OO', '-c', code) + def test_co_branches(self): + + def get_line_branches(func): + code = func.__code__ + base = code.co_firstlineno + return [ + ( + code_offset_to_line(code, src) - base, + code_offset_to_line(code, left) - base, + code_offset_to_line(code, right) - base + ) for (src, left, right) in + code.co_branches() + ] + + def simple(x): + if x: + A + else: + B + + self.assertEqual( + get_line_branches(simple), + [(1,2,4)]) + + def with_extended_args(x): + if x: + A.x; A.x; A.x; A.x; A.x; A.x; + A.x; A.x; A.x; A.x; A.x; A.x; + A.x; A.x; A.x; A.x; A.x; A.x; + A.x; A.x; A.x; A.x; A.x; A.x; + A.x; A.x; A.x; A.x; A.x; A.x; + else: + B + + self.assertEqual( + get_line_branches(with_extended_args), + [(1,2,8)]) + if check_impl_detail(cpython=True) and ctypes is not None: py = ctypes.pythonapi freefunc = ctypes.CFUNCTYPE(None,ctypes.c_voidp) diff --git a/Lib/test/test_code_module.py b/Lib/test/test_code_module.py index 37c7bc772ed8c7..20b960ce8d1e02 100644 --- a/Lib/test/test_code_module.py +++ b/Lib/test/test_code_module.py @@ -5,9 +5,9 @@ from textwrap import dedent from contextlib import ExitStack from unittest import mock +from test.support import force_not_colorized_test_class from test.support import import_helper - code = import_helper.import_module('code') @@ -30,6 +30,7 @@ def mock_sys(self): del self.sysmod.ps2 +@force_not_colorized_test_class class TestInteractiveConsole(unittest.TestCase, MockSys): maxDiff = None diff --git a/Lib/test/test_codeop.py b/Lib/test/test_codeop.py index 787bd1b6a79e20..0eefc22d11bce0 100644 --- a/Lib/test/test_codeop.py +++ b/Lib/test/test_codeop.py @@ -282,7 +282,7 @@ def test_warning(self): # Test that the warning is only returned once. 
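# Editor's note (illustrative sketch, not part of the patch): the message matched
# below comes from compiling a string literal that contains an unknown escape
# sequence; the warning can be observed directly (current versions emit a
# SyntaxWarning, older releases used DeprecationWarning):
import warnings

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    compile(r"'\e'", "<demo>", "eval")

assert any(issubclass(w.category, SyntaxWarning) for w in caught)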
with warnings_helper.check_warnings( ('"is" with \'str\' literal', SyntaxWarning), - ("invalid escape sequence", SyntaxWarning), + ('"\\\\e" is an invalid escape sequence', SyntaxWarning), ) as w: compile_command(r"'\e' is 0") self.assertEqual(len(w.warnings), 2) diff --git a/Lib/test/test_collections.py b/Lib/test/test_collections.py index a24d3e3ea142b7..1e93530398be79 100644 --- a/Lib/test/test_collections.py +++ b/Lib/test/test_collections.py @@ -742,11 +742,11 @@ def validate_isinstance(self, abc, name): C = type('C', (object,), {'__hash__': None}) setattr(C, name, stub) self.assertIsInstance(C(), abc) - self.assertTrue(issubclass(C, abc)) + self.assertIsSubclass(C, abc) C = type('C', (object,), {'__hash__': None}) self.assertNotIsInstance(C(), abc) - self.assertFalse(issubclass(C, abc)) + self.assertNotIsSubclass(C, abc) def validate_comparison(self, instance): ops = ['lt', 'gt', 'le', 'ge', 'ne', 'or', 'and', 'xor', 'sub'] @@ -812,12 +812,12 @@ def __await__(self): non_samples = [None, int(), gen(), object()] for x in non_samples: self.assertNotIsInstance(x, Awaitable) - self.assertFalse(issubclass(type(x), Awaitable), repr(type(x))) + self.assertNotIsSubclass(type(x), Awaitable) samples = [Bar(), MinimalCoro()] for x in samples: self.assertIsInstance(x, Awaitable) - self.assertTrue(issubclass(type(x), Awaitable)) + self.assertIsSubclass(type(x), Awaitable) c = coro() # Iterable coroutines (generators with CO_ITERABLE_COROUTINE @@ -831,8 +831,8 @@ def __await__(self): class CoroLike: pass Coroutine.register(CoroLike) - self.assertTrue(isinstance(CoroLike(), Awaitable)) - self.assertTrue(issubclass(CoroLike, Awaitable)) + self.assertIsInstance(CoroLike(), Awaitable) + self.assertIsSubclass(CoroLike, Awaitable) CoroLike = None support.gc_collect() # Kill CoroLike to clean-up ABCMeta cache @@ -864,12 +864,12 @@ def __await__(self): non_samples = [None, int(), gen(), object(), Bar()] for x in non_samples: self.assertNotIsInstance(x, Coroutine) - self.assertFalse(issubclass(type(x), Coroutine), repr(type(x))) + self.assertNotIsSubclass(type(x), Coroutine) samples = [MinimalCoro()] for x in samples: self.assertIsInstance(x, Awaitable) - self.assertTrue(issubclass(type(x), Awaitable)) + self.assertIsSubclass(type(x), Awaitable) c = coro() # Iterable coroutines (generators with CO_ITERABLE_COROUTINE @@ -890,8 +890,8 @@ def close(self): pass def __await__(self): pass - self.assertTrue(isinstance(CoroLike(), Coroutine)) - self.assertTrue(issubclass(CoroLike, Coroutine)) + self.assertIsInstance(CoroLike(), Coroutine) + self.assertIsSubclass(CoroLike, Coroutine) class CoroLike: def send(self, value): @@ -900,15 +900,15 @@ def close(self): pass def __await__(self): pass - self.assertFalse(isinstance(CoroLike(), Coroutine)) - self.assertFalse(issubclass(CoroLike, Coroutine)) + self.assertNotIsInstance(CoroLike(), Coroutine) + self.assertNotIsSubclass(CoroLike, Coroutine) def test_Hashable(self): # Check some non-hashables non_samples = [bytearray(), list(), set(), dict()] for x in non_samples: self.assertNotIsInstance(x, Hashable) - self.assertFalse(issubclass(type(x), Hashable), repr(type(x))) + self.assertNotIsSubclass(type(x), Hashable) # Check some hashables samples = [None, int(), float(), complex(), @@ -918,14 +918,14 @@ def test_Hashable(self): ] for x in samples: self.assertIsInstance(x, Hashable) - self.assertTrue(issubclass(type(x), Hashable), repr(type(x))) + self.assertIsSubclass(type(x), Hashable) self.assertRaises(TypeError, Hashable) # Check direct subclassing class H(Hashable): 
def __hash__(self): return super().__hash__() self.assertEqual(hash(H()), 0) - self.assertFalse(issubclass(int, H)) + self.assertNotIsSubclass(int, H) self.validate_abstract_methods(Hashable, '__hash__') self.validate_isinstance(Hashable, '__hash__') @@ -933,13 +933,13 @@ def test_AsyncIterable(self): class AI: def __aiter__(self): return self - self.assertTrue(isinstance(AI(), AsyncIterable)) - self.assertTrue(issubclass(AI, AsyncIterable)) + self.assertIsInstance(AI(), AsyncIterable) + self.assertIsSubclass(AI, AsyncIterable) # Check some non-iterables non_samples = [None, object, []] for x in non_samples: self.assertNotIsInstance(x, AsyncIterable) - self.assertFalse(issubclass(type(x), AsyncIterable), repr(type(x))) + self.assertNotIsSubclass(type(x), AsyncIterable) self.validate_abstract_methods(AsyncIterable, '__aiter__') self.validate_isinstance(AsyncIterable, '__aiter__') @@ -949,13 +949,13 @@ def __aiter__(self): return self async def __anext__(self): raise StopAsyncIteration - self.assertTrue(isinstance(AI(), AsyncIterator)) - self.assertTrue(issubclass(AI, AsyncIterator)) + self.assertIsInstance(AI(), AsyncIterator) + self.assertIsSubclass(AI, AsyncIterator) non_samples = [None, object, []] # Check some non-iterables for x in non_samples: self.assertNotIsInstance(x, AsyncIterator) - self.assertFalse(issubclass(type(x), AsyncIterator), repr(type(x))) + self.assertNotIsSubclass(type(x), AsyncIterator) # Similarly to regular iterators (see issue 10565) class AnextOnly: async def __anext__(self): @@ -968,7 +968,7 @@ def test_Iterable(self): non_samples = [None, 42, 3.14, 1j] for x in non_samples: self.assertNotIsInstance(x, Iterable) - self.assertFalse(issubclass(type(x), Iterable), repr(type(x))) + self.assertNotIsSubclass(type(x), Iterable) # Check some iterables samples = [bytes(), str(), tuple(), list(), set(), frozenset(), dict(), @@ -978,13 +978,13 @@ def test_Iterable(self): ] for x in samples: self.assertIsInstance(x, Iterable) - self.assertTrue(issubclass(type(x), Iterable), repr(type(x))) + self.assertIsSubclass(type(x), Iterable) # Check direct subclassing class I(Iterable): def __iter__(self): return super().__iter__() self.assertEqual(list(I()), []) - self.assertFalse(issubclass(str, I)) + self.assertNotIsSubclass(str, I) self.validate_abstract_methods(Iterable, '__iter__') self.validate_isinstance(Iterable, '__iter__') # Check None blocking @@ -992,22 +992,22 @@ class It: def __iter__(self): return iter([]) class ItBlocked(It): __iter__ = None - self.assertTrue(issubclass(It, Iterable)) - self.assertTrue(isinstance(It(), Iterable)) - self.assertFalse(issubclass(ItBlocked, Iterable)) - self.assertFalse(isinstance(ItBlocked(), Iterable)) + self.assertIsSubclass(It, Iterable) + self.assertIsInstance(It(), Iterable) + self.assertNotIsSubclass(ItBlocked, Iterable) + self.assertNotIsInstance(ItBlocked(), Iterable) def test_Reversible(self): # Check some non-reversibles non_samples = [None, 42, 3.14, 1j, set(), frozenset()] for x in non_samples: self.assertNotIsInstance(x, Reversible) - self.assertFalse(issubclass(type(x), Reversible), repr(type(x))) + self.assertNotIsSubclass(type(x), Reversible) # Check some non-reversible iterables non_reversibles = [_test_gen(), (x for x in []), iter([]), reversed([])] for x in non_reversibles: self.assertNotIsInstance(x, Reversible) - self.assertFalse(issubclass(type(x), Reversible), repr(type(x))) + self.assertNotIsSubclass(type(x), Reversible) # Check some reversible iterables samples = [bytes(), str(), tuple(), list(), OrderedDict(), 
OrderedDict().keys(), OrderedDict().items(), @@ -1016,11 +1016,11 @@ def test_Reversible(self): dict().keys(), dict().items(), dict().values()] for x in samples: self.assertIsInstance(x, Reversible) - self.assertTrue(issubclass(type(x), Reversible), repr(type(x))) + self.assertIsSubclass(type(x), Reversible) # Check also Mapping, MutableMapping, and Sequence - self.assertTrue(issubclass(Sequence, Reversible), repr(Sequence)) - self.assertFalse(issubclass(Mapping, Reversible), repr(Mapping)) - self.assertFalse(issubclass(MutableMapping, Reversible), repr(MutableMapping)) + self.assertIsSubclass(Sequence, Reversible) + self.assertNotIsSubclass(Mapping, Reversible) + self.assertNotIsSubclass(MutableMapping, Reversible) # Check direct subclassing class R(Reversible): def __iter__(self): @@ -1028,17 +1028,17 @@ def __iter__(self): def __reversed__(self): return iter(list()) self.assertEqual(list(reversed(R())), []) - self.assertFalse(issubclass(float, R)) + self.assertNotIsSubclass(float, R) self.validate_abstract_methods(Reversible, '__reversed__', '__iter__') # Check reversible non-iterable (which is not Reversible) class RevNoIter: def __reversed__(self): return reversed([]) class RevPlusIter(RevNoIter): def __iter__(self): return iter([]) - self.assertFalse(issubclass(RevNoIter, Reversible)) - self.assertFalse(isinstance(RevNoIter(), Reversible)) - self.assertTrue(issubclass(RevPlusIter, Reversible)) - self.assertTrue(isinstance(RevPlusIter(), Reversible)) + self.assertNotIsSubclass(RevNoIter, Reversible) + self.assertNotIsInstance(RevNoIter(), Reversible) + self.assertIsSubclass(RevPlusIter, Reversible) + self.assertIsInstance(RevPlusIter(), Reversible) # Check None blocking class Rev: def __iter__(self): return iter([]) @@ -1047,39 +1047,38 @@ class RevItBlocked(Rev): __iter__ = None class RevRevBlocked(Rev): __reversed__ = None - self.assertTrue(issubclass(Rev, Reversible)) - self.assertTrue(isinstance(Rev(), Reversible)) - self.assertFalse(issubclass(RevItBlocked, Reversible)) - self.assertFalse(isinstance(RevItBlocked(), Reversible)) - self.assertFalse(issubclass(RevRevBlocked, Reversible)) - self.assertFalse(isinstance(RevRevBlocked(), Reversible)) + self.assertIsSubclass(Rev, Reversible) + self.assertIsInstance(Rev(), Reversible) + self.assertNotIsSubclass(RevItBlocked, Reversible) + self.assertNotIsInstance(RevItBlocked(), Reversible) + self.assertNotIsSubclass(RevRevBlocked, Reversible) + self.assertNotIsInstance(RevRevBlocked(), Reversible) def test_Collection(self): # Check some non-collections non_collections = [None, 42, 3.14, 1j, lambda x: 2*x] for x in non_collections: self.assertNotIsInstance(x, Collection) - self.assertFalse(issubclass(type(x), Collection), repr(type(x))) + self.assertNotIsSubclass(type(x), Collection) # Check some non-collection iterables non_col_iterables = [_test_gen(), iter(b''), iter(bytearray()), (x for x in [])] for x in non_col_iterables: self.assertNotIsInstance(x, Collection) - self.assertFalse(issubclass(type(x), Collection), repr(type(x))) + self.assertNotIsSubclass(type(x), Collection) # Check some collections samples = [set(), frozenset(), dict(), bytes(), str(), tuple(), list(), dict().keys(), dict().items(), dict().values()] for x in samples: self.assertIsInstance(x, Collection) - self.assertTrue(issubclass(type(x), Collection), repr(type(x))) + self.assertIsSubclass(type(x), Collection) # Check also Mapping, MutableMapping, etc. 
- self.assertTrue(issubclass(Sequence, Collection), repr(Sequence)) - self.assertTrue(issubclass(Mapping, Collection), repr(Mapping)) - self.assertTrue(issubclass(MutableMapping, Collection), - repr(MutableMapping)) - self.assertTrue(issubclass(Set, Collection), repr(Set)) - self.assertTrue(issubclass(MutableSet, Collection), repr(MutableSet)) - self.assertTrue(issubclass(Sequence, Collection), repr(MutableSet)) + self.assertIsSubclass(Sequence, Collection) + self.assertIsSubclass(Mapping, Collection) + self.assertIsSubclass(MutableMapping, Collection) + self.assertIsSubclass(Set, Collection) + self.assertIsSubclass(MutableSet, Collection) + self.assertIsSubclass(Sequence, Collection) # Check direct subclassing class Col(Collection): def __iter__(self): @@ -1090,13 +1089,13 @@ def __contains__(self, item): return False class DerCol(Col): pass self.assertEqual(list(iter(Col())), []) - self.assertFalse(issubclass(list, Col)) - self.assertFalse(issubclass(set, Col)) - self.assertFalse(issubclass(float, Col)) + self.assertNotIsSubclass(list, Col) + self.assertNotIsSubclass(set, Col) + self.assertNotIsSubclass(float, Col) self.assertEqual(list(iter(DerCol())), []) - self.assertFalse(issubclass(list, DerCol)) - self.assertFalse(issubclass(set, DerCol)) - self.assertFalse(issubclass(float, DerCol)) + self.assertNotIsSubclass(list, DerCol) + self.assertNotIsSubclass(set, DerCol) + self.assertNotIsSubclass(float, DerCol) self.validate_abstract_methods(Collection, '__len__', '__iter__', '__contains__') # Check sized container non-iterable (which is not Collection) etc. @@ -1109,12 +1108,12 @@ def __contains__(self, item): return False class ColNoCont: def __iter__(self): return iter([]) def __len__(self): return 0 - self.assertFalse(issubclass(ColNoIter, Collection)) - self.assertFalse(isinstance(ColNoIter(), Collection)) - self.assertFalse(issubclass(ColNoSize, Collection)) - self.assertFalse(isinstance(ColNoSize(), Collection)) - self.assertFalse(issubclass(ColNoCont, Collection)) - self.assertFalse(isinstance(ColNoCont(), Collection)) + self.assertNotIsSubclass(ColNoIter, Collection) + self.assertNotIsInstance(ColNoIter(), Collection) + self.assertNotIsSubclass(ColNoSize, Collection) + self.assertNotIsInstance(ColNoSize(), Collection) + self.assertNotIsSubclass(ColNoCont, Collection) + self.assertNotIsInstance(ColNoCont(), Collection) # Check None blocking class SizeBlock: def __iter__(self): return iter([]) @@ -1124,10 +1123,10 @@ class IterBlock: def __len__(self): return 0 def __contains__(self): return True __iter__ = None - self.assertFalse(issubclass(SizeBlock, Collection)) - self.assertFalse(isinstance(SizeBlock(), Collection)) - self.assertFalse(issubclass(IterBlock, Collection)) - self.assertFalse(isinstance(IterBlock(), Collection)) + self.assertNotIsSubclass(SizeBlock, Collection) + self.assertNotIsInstance(SizeBlock(), Collection) + self.assertNotIsSubclass(IterBlock, Collection) + self.assertNotIsInstance(IterBlock(), Collection) # Check None blocking in subclass class ColImpl: def __iter__(self): @@ -1138,15 +1137,15 @@ def __contains__(self, item): return False class NonCol(ColImpl): __contains__ = None - self.assertFalse(issubclass(NonCol, Collection)) - self.assertFalse(isinstance(NonCol(), Collection)) + self.assertNotIsSubclass(NonCol, Collection) + self.assertNotIsInstance(NonCol(), Collection) def test_Iterator(self): non_samples = [None, 42, 3.14, 1j, b"", "", (), [], {}, set()] for x in non_samples: self.assertNotIsInstance(x, Iterator) - 
self.assertFalse(issubclass(type(x), Iterator), repr(type(x))) + self.assertNotIsSubclass(type(x), Iterator) samples = [iter(bytes()), iter(str()), iter(tuple()), iter(list()), iter(dict()), iter(set()), iter(frozenset()), @@ -1157,7 +1156,7 @@ def test_Iterator(self): ] for x in samples: self.assertIsInstance(x, Iterator) - self.assertTrue(issubclass(type(x), Iterator), repr(type(x))) + self.assertIsSubclass(type(x), Iterator) self.validate_abstract_methods(Iterator, '__next__', '__iter__') # Issue 10565 @@ -1190,7 +1189,7 @@ def throw(self, typ, val=None, tb=None): pass iter(()), iter([]), NonGen1(), NonGen2(), NonGen3()] for x in non_samples: self.assertNotIsInstance(x, Generator) - self.assertFalse(issubclass(type(x), Generator), repr(type(x))) + self.assertNotIsSubclass(type(x), Generator) class Gen: def __iter__(self): return self @@ -1212,7 +1211,7 @@ def gen(): for x in samples: self.assertIsInstance(x, Iterator) self.assertIsInstance(x, Generator) - self.assertTrue(issubclass(type(x), Generator), repr(type(x))) + self.assertIsSubclass(type(x), Generator) self.validate_abstract_methods(Generator, 'send', 'throw') # mixin tests @@ -1261,7 +1260,7 @@ def athrow(self, typ, val=None, tb=None): pass iter(()), iter([]), NonAGen1(), NonAGen2(), NonAGen3()] for x in non_samples: self.assertNotIsInstance(x, AsyncGenerator) - self.assertFalse(issubclass(type(x), AsyncGenerator), repr(type(x))) + self.assertNotIsSubclass(type(x), AsyncGenerator) class Gen: def __aiter__(self): return self @@ -1283,7 +1282,7 @@ async def gen(): for x in samples: self.assertIsInstance(x, AsyncIterator) self.assertIsInstance(x, AsyncGenerator) - self.assertTrue(issubclass(type(x), AsyncGenerator), repr(type(x))) + self.assertIsSubclass(type(x), AsyncGenerator) self.validate_abstract_methods(AsyncGenerator, 'asend', 'athrow') def run_async(coro): @@ -1326,14 +1325,14 @@ def test_Sized(self): ] for x in non_samples: self.assertNotIsInstance(x, Sized) - self.assertFalse(issubclass(type(x), Sized), repr(type(x))) + self.assertNotIsSubclass(type(x), Sized) samples = [bytes(), str(), tuple(), list(), set(), frozenset(), dict(), dict().keys(), dict().items(), dict().values(), ] for x in samples: self.assertIsInstance(x, Sized) - self.assertTrue(issubclass(type(x), Sized), repr(type(x))) + self.assertIsSubclass(type(x), Sized) self.validate_abstract_methods(Sized, '__len__') self.validate_isinstance(Sized, '__len__') @@ -1344,14 +1343,14 @@ def test_Container(self): ] for x in non_samples: self.assertNotIsInstance(x, Container) - self.assertFalse(issubclass(type(x), Container), repr(type(x))) + self.assertNotIsSubclass(type(x), Container) samples = [bytes(), str(), tuple(), list(), set(), frozenset(), dict(), dict().keys(), dict().items(), ] for x in samples: self.assertIsInstance(x, Container) - self.assertTrue(issubclass(type(x), Container), repr(type(x))) + self.assertIsSubclass(type(x), Container) self.validate_abstract_methods(Container, '__contains__') self.validate_isinstance(Container, '__contains__') @@ -1363,7 +1362,7 @@ def test_Callable(self): ] for x in non_samples: self.assertNotIsInstance(x, Callable) - self.assertFalse(issubclass(type(x), Callable), repr(type(x))) + self.assertNotIsSubclass(type(x), Callable) samples = [lambda: None, type, int, object, len, @@ -1371,7 +1370,7 @@ def test_Callable(self): ] for x in samples: self.assertIsInstance(x, Callable) - self.assertTrue(issubclass(type(x), Callable), repr(type(x))) + self.assertIsSubclass(type(x), Callable) self.validate_abstract_methods(Callable, 
'__call__') self.validate_isinstance(Callable, '__call__') @@ -1379,16 +1378,16 @@ def test_direct_subclassing(self): for B in Hashable, Iterable, Iterator, Reversible, Sized, Container, Callable: class C(B): pass - self.assertTrue(issubclass(C, B)) - self.assertFalse(issubclass(int, C)) + self.assertIsSubclass(C, B) + self.assertNotIsSubclass(int, C) def test_registration(self): for B in Hashable, Iterable, Iterator, Reversible, Sized, Container, Callable: class C: __hash__ = None # Make sure it isn't hashable by default - self.assertFalse(issubclass(C, B), B.__name__) + self.assertNotIsSubclass(C, B) B.register(C) - self.assertTrue(issubclass(C, B)) + self.assertIsSubclass(C, B) class WithSet(MutableSet): @@ -1419,7 +1418,7 @@ class TestCollectionABCs(ABCTestCase): def test_Set(self): for sample in [set, frozenset]: self.assertIsInstance(sample(), Set) - self.assertTrue(issubclass(sample, Set)) + self.assertIsSubclass(sample, Set) self.validate_abstract_methods(Set, '__contains__', '__iter__', '__len__') class MySet(Set): def __contains__(self, x): @@ -1500,9 +1499,9 @@ def __len__(self): def test_MutableSet(self): self.assertIsInstance(set(), MutableSet) - self.assertTrue(issubclass(set, MutableSet)) + self.assertIsSubclass(set, MutableSet) self.assertNotIsInstance(frozenset(), MutableSet) - self.assertFalse(issubclass(frozenset, MutableSet)) + self.assertNotIsSubclass(frozenset, MutableSet) self.validate_abstract_methods(MutableSet, '__contains__', '__iter__', '__len__', 'add', 'discard') @@ -1841,7 +1840,7 @@ def test_Set_hash_matches_frozenset(self): def test_Mapping(self): for sample in [dict]: self.assertIsInstance(sample(), Mapping) - self.assertTrue(issubclass(sample, Mapping)) + self.assertIsSubclass(sample, Mapping) self.validate_abstract_methods(Mapping, '__contains__', '__iter__', '__len__', '__getitem__') class MyMapping(Mapping): @@ -1857,7 +1856,7 @@ def __iter__(self): def test_MutableMapping(self): for sample in [dict]: self.assertIsInstance(sample(), MutableMapping) - self.assertTrue(issubclass(sample, MutableMapping)) + self.assertIsSubclass(sample, MutableMapping) self.validate_abstract_methods(MutableMapping, '__contains__', '__iter__', '__len__', '__getitem__', '__setitem__', '__delitem__') @@ -1891,12 +1890,12 @@ def test_MutableMapping_subclass(self): def test_Sequence(self): for sample in [tuple, list, bytes, str]: self.assertIsInstance(sample(), Sequence) - self.assertTrue(issubclass(sample, Sequence)) + self.assertIsSubclass(sample, Sequence) self.assertIsInstance(range(10), Sequence) - self.assertTrue(issubclass(range, Sequence)) + self.assertIsSubclass(range, Sequence) self.assertIsInstance(memoryview(b""), Sequence) - self.assertTrue(issubclass(memoryview, Sequence)) - self.assertTrue(issubclass(str, Sequence)) + self.assertIsSubclass(memoryview, Sequence) + self.assertIsSubclass(str, Sequence) self.validate_abstract_methods(Sequence, '__contains__', '__iter__', '__len__', '__getitem__') @@ -1938,21 +1937,21 @@ def assert_index_same(seq1, seq2, index_args): def test_Buffer(self): for sample in [bytes, bytearray, memoryview]: self.assertIsInstance(sample(b"x"), Buffer) - self.assertTrue(issubclass(sample, Buffer)) + self.assertIsSubclass(sample, Buffer) for sample in [str, list, tuple]: self.assertNotIsInstance(sample(), Buffer) - self.assertFalse(issubclass(sample, Buffer)) + self.assertNotIsSubclass(sample, Buffer) self.validate_abstract_methods(Buffer, '__buffer__') def test_MutableSequence(self): for sample in [tuple, str, bytes]: 
self.assertNotIsInstance(sample(), MutableSequence) - self.assertFalse(issubclass(sample, MutableSequence)) + self.assertNotIsSubclass(sample, MutableSequence) for sample in [list, bytearray, deque]: self.assertIsInstance(sample(), MutableSequence) - self.assertTrue(issubclass(sample, MutableSequence)) - self.assertTrue(issubclass(array.array, MutableSequence)) - self.assertFalse(issubclass(str, MutableSequence)) + self.assertIsSubclass(sample, MutableSequence) + self.assertIsSubclass(array.array, MutableSequence) + self.assertNotIsSubclass(str, MutableSequence) self.validate_abstract_methods(MutableSequence, '__contains__', '__iter__', '__len__', '__getitem__', '__setitem__', '__delitem__', 'insert') @@ -2043,8 +2042,8 @@ def test_basics(self): self.assertEqual(c, Counter(a=3, b=2, c=1)) self.assertIsInstance(c, dict) self.assertIsInstance(c, Mapping) - self.assertTrue(issubclass(Counter, dict)) - self.assertTrue(issubclass(Counter, Mapping)) + self.assertIsSubclass(Counter, dict) + self.assertIsSubclass(Counter, Mapping) self.assertEqual(len(c), 3) self.assertEqual(sum(c.values()), 6) self.assertEqual(list(c.values()), [3, 2, 1]) diff --git a/Lib/test/test_compileall.py b/Lib/test/test_compileall.py index 3a34c6822bc079..a580a240d9f474 100644 --- a/Lib/test/test_compileall.py +++ b/Lib/test/test_compileall.py @@ -766,6 +766,7 @@ def test_d_compile_error(self): rc, out, err = self.assertRunNotOK('-q', '-d', 'dinsdale', self.pkgdir) self.assertRegex(out, b'File "dinsdale') + @support.force_not_colorized def test_d_runtime_error(self): bazfn = script_helper.make_script(self.pkgdir, 'baz', 'raise Exception') self.assertRunOK('-q', '-d', 'dinsdale', self.pkgdir) diff --git a/Lib/test/test_compiler_codegen.py b/Lib/test/test_compiler_codegen.py index f8c4fc14c91ebe..cf5e2d901db4de 100644 --- a/Lib/test/test_compiler_codegen.py +++ b/Lib/test/test_compiler_codegen.py @@ -29,7 +29,6 @@ def test_if_expression(self): ('LOAD_CONST', 0, 1), ('TO_BOOL', 0, 1), ('POP_JUMP_IF_FALSE', false_lbl := self.Label(), 1), - ('NOT_TAKEN', None, 1), ('LOAD_SMALL_INT', 42, 1), ('JUMP_NO_INTERRUPT', exit_lbl := self.Label()), false_lbl, @@ -50,7 +49,6 @@ def test_for_loop(self): ('GET_ITER', None, 1), loop_lbl := self.Label(), ('FOR_ITER', exit_lbl := self.Label(), 1), - ('NOT_TAKEN', None, 1), ('NOP', None, 1, 1), ('STORE_NAME', 1, 1), ('LOAD_NAME', 2, 2), @@ -61,7 +59,7 @@ def test_for_loop(self): ('JUMP', loop_lbl), exit_lbl, ('END_FOR', None), - ('POP_TOP', None), + ('POP_ITER', None), ('LOAD_CONST', 0), ('RETURN_VALUE', None), ] diff --git a/Lib/test/test_configparser.py b/Lib/test/test_configparser.py index e3c5d08dd1e7d1..bde805eb741c33 100644 --- a/Lib/test/test_configparser.py +++ b/Lib/test/test_configparser.py @@ -2174,6 +2174,15 @@ def test_disabled_error(self): with self.assertRaises(configparser.UnnamedSectionDisabledError): configparser.ConfigParser().add_section(configparser.UNNAMED_SECTION) + def test_multiple_configs(self): + cfg = configparser.ConfigParser(allow_unnamed_section=True) + cfg.read_string('a = 1') + cfg.read_string('b = 2') + + self.assertEqual([configparser.UNNAMED_SECTION], cfg.sections()) + self.assertEqual('1', cfg[configparser.UNNAMED_SECTION]['a']) + self.assertEqual('2', cfg[configparser.UNNAMED_SECTION]['b']) + class MiscTestCase(unittest.TestCase): def test__all__(self): diff --git a/Lib/test/test_ctypes/test_arrays.py b/Lib/test/test_ctypes/test_arrays.py index c80fdff5de685d..7f1f6cf58402c9 100644 --- a/Lib/test/test_ctypes/test_arrays.py +++ 
b/Lib/test/test_ctypes/test_arrays.py @@ -5,7 +5,7 @@ create_string_buffer, create_unicode_buffer, c_char, c_wchar, c_byte, c_ubyte, c_short, c_ushort, c_int, c_uint, c_long, c_ulonglong, c_float, c_double, c_longdouble) -from test.support import bigmemtest, _2G +from test.support import bigmemtest, _2G, threading_helper, Py_GIL_DISABLED from ._support import (_CData, PyCArrayType, Py_TPFLAGS_DISALLOW_INSTANTIATION, Py_TPFLAGS_IMMUTABLETYPE) @@ -267,6 +267,26 @@ def test_bpo36504_signed_int_overflow(self): def test_large_array(self, size): c_char * size + @threading_helper.requires_working_threading() + @unittest.skipUnless(Py_GIL_DISABLED, "only meaningful if the GIL is disabled") + def test_thread_safety(self): + from threading import Thread + + buffer = (ctypes.c_char_p * 10)() + + def run(): + for i in range(100): + buffer.value = b"hello" + buffer[0] = b"j" + + with threading_helper.catch_threading_exception() as cm: + threads = (Thread(target=run) for _ in range(25)) + with threading_helper.start_threads(threads): + pass + + if cm.exc_value: + raise cm.exc_value + if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_ctypes/test_c_simple_type_meta.py b/Lib/test/test_ctypes/test_c_simple_type_meta.py index eb77d6d7782478..b446fd5c77dde2 100644 --- a/Lib/test/test_ctypes/test_c_simple_type_meta.py +++ b/Lib/test/test_ctypes/test_c_simple_type_meta.py @@ -1,4 +1,5 @@ import unittest +from test.support import MS_WINDOWS import ctypes from ctypes import POINTER, c_void_p @@ -54,9 +55,9 @@ class Sub2(Sub): pass self.assertIsInstance(POINTER(Sub2), p_meta) - self.assertTrue(issubclass(POINTER(Sub2), Sub2)) - self.assertTrue(issubclass(POINTER(Sub2), POINTER(Sub))) - self.assertTrue(issubclass(POINTER(Sub), POINTER(CtBase))) + self.assertIsSubclass(POINTER(Sub2), Sub2) + self.assertIsSubclass(POINTER(Sub2), POINTER(Sub)) + self.assertIsSubclass(POINTER(Sub), POINTER(CtBase)) def test_creating_pointer_in_dunder_new_2(self): # A simpler variant of the above, used in `CoClass` of the `comtypes` @@ -84,7 +85,7 @@ class Sub(CtBase): pass self.assertIsInstance(POINTER(Sub), p_meta) - self.assertTrue(issubclass(POINTER(Sub), Sub)) + self.assertIsSubclass(POINTER(Sub), Sub) def test_creating_pointer_in_dunder_init_1(self): class ct_meta(type): @@ -120,9 +121,9 @@ class Sub2(Sub): pass self.assertIsInstance(POINTER(Sub2), p_meta) - self.assertTrue(issubclass(POINTER(Sub2), Sub2)) - self.assertTrue(issubclass(POINTER(Sub2), POINTER(Sub))) - self.assertTrue(issubclass(POINTER(Sub), POINTER(CtBase))) + self.assertIsSubclass(POINTER(Sub2), Sub2) + self.assertIsSubclass(POINTER(Sub2), POINTER(Sub)) + self.assertIsSubclass(POINTER(Sub), POINTER(CtBase)) def test_creating_pointer_in_dunder_init_2(self): class ct_meta(type): @@ -149,4 +150,21 @@ class Sub(CtBase): pass self.assertIsInstance(POINTER(Sub), p_meta) - self.assertTrue(issubclass(POINTER(Sub), Sub)) + self.assertIsSubclass(POINTER(Sub), Sub) + + def test_bad_type_message(self): + """Verify the error message that lists all available type codes""" + # (The string is generated at runtime, so this checks the underlying + # set of types as well as correct construction of the string.) 
+ with self.assertRaises(AttributeError) as cm: + class F(metaclass=PyCSimpleType): + _type_ = "\0" + message = str(cm.exception) + expected_type_chars = list('cbBhHiIlLdCEFfuzZqQPXOv?g') + if not hasattr(ctypes, 'c_float_complex'): + expected_type_chars.remove('C') + expected_type_chars.remove('E') + expected_type_chars.remove('F') + if not MS_WINDOWS: + expected_type_chars.remove('X') + self.assertIn("'" + ''.join(expected_type_chars) + "'", message) diff --git a/Lib/test/test_ctypes/test_callbacks.py b/Lib/test/test_ctypes/test_callbacks.py index 8f483dfe1db801..6c7c2e5270736e 100644 --- a/Lib/test/test_ctypes/test_callbacks.py +++ b/Lib/test/test_ctypes/test_callbacks.py @@ -324,7 +324,7 @@ def func(): self.assertIsInstance(cm.unraisable.exc_value, TypeError) self.assertEqual(cm.unraisable.err_msg, - f"Exception ignored on converting result " + f"Exception ignored while converting result " f"of ctypes callback function {func!r}") self.assertIsNone(cm.unraisable.object) diff --git a/Lib/test/test_ctypes/test_generated_structs.py b/Lib/test/test_ctypes/test_generated_structs.py index d61754d6d49e70..1df9f0dc16368f 100644 --- a/Lib/test/test_ctypes/test_generated_structs.py +++ b/Lib/test/test_ctypes/test_generated_structs.py @@ -443,7 +443,7 @@ def test_generated_data(self): - None - reason to skip the test (str) - This does depend on the C compiler keeping padding bits zero. + This does depend on the C compiler keeping padding bits unchanged. Common compilers seem to do so. """ for name, cls in TESTCASES.items(): @@ -696,7 +696,8 @@ def output(string): output(' ' + line) typename = f'{struct_or_union(cls)} {name}' output(f""" - {typename} value = {{0}}; + {typename} value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString({c_str_repr(name)})); APPEND(PyLong_FromLong(sizeof({typename}))); APPEND(PyLong_FromLong(_Alignof({typename}))); diff --git a/Lib/test/test_ctypes/test_loading.py b/Lib/test/test_ctypes/test_loading.py index fc1eecb77e17e3..13ed813ad98c31 100644 --- a/Lib/test/test_ctypes/test_loading.py +++ b/Lib/test/test_ctypes/test_loading.py @@ -135,7 +135,7 @@ def test_1703286_B(self): 'test specific to Windows') def test_load_hasattr(self): # bpo-34816: shouldn't raise OSError - self.assertFalse(hasattr(ctypes.windll, 'test')) + self.assertNotHasAttr(ctypes.windll, 'test') @unittest.skipUnless(os.name == "nt", 'test specific to Windows') diff --git a/Lib/test/test_ctypes/test_random_things.py b/Lib/test/test_ctypes/test_random_things.py index 630f6ed9489eba..73ff57d925e2ea 100644 --- a/Lib/test/test_ctypes/test_random_things.py +++ b/Lib/test/test_ctypes/test_random_things.py @@ -51,7 +51,7 @@ def expect_unraisable(self, exc_type, exc_msg=None): if exc_msg is not None: self.assertEqual(str(cm.unraisable.exc_value), exc_msg) self.assertEqual(cm.unraisable.err_msg, - f"Exception ignored on calling ctypes " + f"Exception ignored while calling ctypes " f"callback function {callback_func!r}") self.assertIsNone(cm.unraisable.object) diff --git a/Lib/test/test_ctypes/test_repr.py b/Lib/test/test_ctypes/test_repr.py index e7587984a92c45..8c85e6cbe70cea 100644 --- a/Lib/test/test_ctypes/test_repr.py +++ b/Lib/test/test_ctypes/test_repr.py @@ -22,12 +22,12 @@ class ReprTest(unittest.TestCase): def test_numbers(self): for typ in subclasses: base = typ.__bases__[0] - self.assertTrue(repr(base(42)).startswith(base.__name__)) - self.assertEqual(" at 0x..., file "%s", line %d>) MAKE_FUNCTION LOAD_FAST 0 (x) @@ -434,13 +432,13 @@ def foo(a: int, b: str) -> str: 1 LOAD_SMALL_INT 0 
STORE_NAME 0 (x) - 2 L1: NOT_TAKEN + 2 L1: NOP 3 LOAD_NAME 0 (x) LOAD_SMALL_INT 1 BINARY_OP 13 (+=) STORE_NAME 0 (x) - JUMP_BACKWARD 8 (to L1) + JUMP_BACKWARD 12 (to L1) """ dis_traceback = """\ @@ -649,11 +647,11 @@ async def _asyncwith(c): L20: CLEANUP_THROW L21: END_SEND TO_BOOL - POP_JUMP_IF_TRUE 2 (to L22) - NOT_TAKEN - RERAISE 2 - L22: POP_TOP - L23: POP_EXCEPT + POP_JUMP_IF_TRUE 2 (to L24) + L22: NOT_TAKEN + L23: RERAISE 2 + L24: POP_TOP + L25: POP_EXCEPT POP_TOP POP_TOP POP_TOP @@ -663,24 +661,25 @@ async def _asyncwith(c): LOAD_CONST 0 (None) RETURN_VALUE - -- L24: COPY 3 + -- L26: COPY 3 POP_EXCEPT RERAISE 1 - L25: CALL_INTRINSIC_1 3 (INTRINSIC_STOPITERATION_ERROR) + L27: CALL_INTRINSIC_1 3 (INTRINSIC_STOPITERATION_ERROR) RERAISE 1 ExceptionTable: - L1 to L3 -> L25 [0] lasti + L1 to L3 -> L27 [0] lasti L3 to L4 -> L12 [4] - L4 to L6 -> L25 [0] lasti + L4 to L6 -> L27 [0] lasti L6 to L7 -> L16 [2] lasti - L7 to L9 -> L25 [0] lasti + L7 to L9 -> L27 [0] lasti L9 to L10 -> L14 [2] - L10 to L13 -> L25 [0] lasti - L14 to L15 -> L25 [0] lasti - L16 to L18 -> L24 [4] lasti + L10 to L13 -> L27 [0] lasti + L14 to L15 -> L27 [0] lasti + L16 to L18 -> L26 [4] lasti L18 to L19 -> L20 [7] - L19 to L23 -> L24 [4] lasti - L23 to L25 -> L25 [0] lasti + L19 to L22 -> L26 [4] lasti + L23 to L25 -> L26 [4] lasti + L25 to L27 -> L27 [0] lasti """ % (_asyncwith.__code__.co_firstlineno, _asyncwith.__code__.co_firstlineno + 1, _asyncwith.__code__.co_firstlineno + 2, @@ -844,8 +843,7 @@ def foo(x): L1: RESUME 0 LOAD_FAST 0 (.0) GET_ITER - L2: FOR_ITER 11 (to L3) - NOT_TAKEN + L2: FOR_ITER 14 (to L3) STORE_FAST 1 (z) LOAD_DEREF 2 (x) LOAD_FAST 1 (z) @@ -853,9 +851,9 @@ def foo(x): YIELD_VALUE 0 RESUME 5 POP_TOP - JUMP_BACKWARD 13 (to L2) + JUMP_BACKWARD 16 (to L2) L3: END_FOR - POP_TOP + POP_ITER LOAD_CONST 0 (None) RETURN_VALUE @@ -894,24 +892,23 @@ def loop_test(): %3d RESUME_CHECK 0 %3d BUILD_LIST 0 - LOAD_CONST 0 ((1, 2, 3)) + LOAD_CONST_MORTAL 1 ((1, 2, 3)) LIST_EXTEND 1 LOAD_SMALL_INT 3 BINARY_OP 5 (*) GET_ITER - L1: FOR_ITER_LIST 15 (to L2) - NOT_TAKEN + L1: FOR_ITER_LIST 14 (to L2) STORE_FAST 0 (i) %3d LOAD_GLOBAL_MODULE 1 (load_test + NULL) LOAD_FAST 0 (i) CALL_PY_GENERAL 1 POP_TOP - JUMP_BACKWARD 17 (to L1) + JUMP_BACKWARD_{: <6} 16 (to L1) %3d L2: END_FOR - POP_TOP - LOAD_CONST_IMMORTAL 1 (None) + POP_ITER + LOAD_CONST_IMMORTAL 0 (None) RETURN_VALUE """ % (loop_test.__code__.co_firstlineno, loop_test.__code__.co_firstlineno + 1, @@ -934,8 +931,6 @@ def extended_arg_quick(): """% (extended_arg_quick.__code__.co_firstlineno, extended_arg_quick.__code__.co_firstlineno + 1,) -ADAPTIVE_WARMUP_DELAY = 2 - class DisTestBase(unittest.TestCase): "Common utilities for DisTests and TestDisTraceback" @@ -1002,12 +997,14 @@ def test_boundaries(self): def test_widths(self): long_opcodes = set(['JUMP_BACKWARD_NO_INTERRUPT', 'INSTRUMENTED_CALL_FUNCTION_EX']) - for opcode, opname in enumerate(dis.opname): + for op, opname in enumerate(dis.opname): if opname in long_opcodes or opname.startswith("INSTRUMENTED"): continue + if opname in opcode._specialized_opmap: + continue with self.subTest(opname=opname): width = dis._OPNAME_WIDTH - if opcode in dis.hasarg: + if op in dis.hasarg: width += 1 + dis._OPARG_WIDTH self.assertLessEqual(len(opname), width) @@ -1260,8 +1257,9 @@ def test__try_compile_no_context_exc_on_error(self): self.assertIsNone(e.__context__) @staticmethod - def code_quicken(f, times=ADAPTIVE_WARMUP_DELAY): - for _ in range(times): + def code_quicken(f): + _testinternalcapi = 
import_helper.import_module("_testinternalcapi") + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): f() @cpython_only @@ -1307,16 +1305,18 @@ def test_call_specialize(self): @requires_specialization def test_loop_quicken(self): # Loop can trigger a quicken where the loop is located - self.code_quicken(loop_test, 4) + self.code_quicken(loop_test) got = self.get_disassembly(loop_test, adaptive=True) - expected = dis_loop_test_quickened_code + jit = import_helper.import_module("_testinternalcapi").jit_enabled() + expected = dis_loop_test_quickened_code.format("JIT" if jit else "NO_JIT") self.do_disassembly_compare(got, expected) @cpython_only @requires_specialization def test_loop_with_conditional_at_end_is_quickened(self): + _testinternalcapi = import_helper.import_module("_testinternalcapi") def for_loop_true(x): - for i in range(10): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): if x: pass @@ -1325,7 +1325,7 @@ def for_loop_true(x): self.get_disassembly(for_loop_true, adaptive=True)) def for_loop_false(x): - for i in range(10): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): if x: pass @@ -1335,7 +1335,7 @@ def for_loop_false(x): def while_loop(): i = 0 - while i < 10: + while i < _testinternalcapi.SPECIALIZATION_THRESHOLD: i += 1 while_loop() @@ -1356,7 +1356,7 @@ def f(): self.code_quicken(f) else: # "copy" the code to un-quicken it: - f.__code__ = f.__code__.replace() + reset_code(f) for instruction in _unroll_caches_as_Instructions(dis.get_instructions( f, show_caches=True, adaptive=adaptive ), show_caches=True): @@ -1706,214 +1706,211 @@ def _prepare_test_cases(): Instruction = dis.Instruction expected_opinfo_outer = [ - Instruction(opname='MAKE_CELL', opcode=93, arg=0, argval='a', argrepr='a', offset=0, start_offset=0, starts_line=True, line_number=None, label=None, positions=None, cache_info=None), - Instruction(opname='MAKE_CELL', opcode=93, arg=1, argval='b', argrepr='b', offset=2, start_offset=2, starts_line=False, line_number=None, label=None, positions=None, cache_info=None), + Instruction(opname='MAKE_CELL', opcode=94, arg=0, argval='a', argrepr='a', offset=0, start_offset=0, starts_line=True, line_number=None, label=None, positions=None, cache_info=None), + Instruction(opname='MAKE_CELL', opcode=94, arg=1, argval='b', argrepr='b', offset=2, start_offset=2, starts_line=False, line_number=None, label=None, positions=None, cache_info=None), Instruction(opname='RESUME', opcode=149, arg=0, argval=0, argrepr='', offset=4, start_offset=4, starts_line=True, line_number=1, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_CONST', opcode=80, arg=3, argval=(3, 4), argrepr='(3, 4)', offset=6, start_offset=6, starts_line=True, line_number=2, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_FAST', opcode=82, arg=0, argval='a', argrepr='a', offset=8, start_offset=8, starts_line=False, line_number=2, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_FAST', opcode=82, arg=1, argval='b', argrepr='b', offset=10, start_offset=10, starts_line=False, line_number=2, label=None, positions=None, cache_info=None), - Instruction(opname='BUILD_TUPLE', opcode=49, arg=2, argval=2, argrepr='', offset=12, start_offset=12, starts_line=False, line_number=2, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_CONST', opcode=80, arg=0, argval=code_object_f, argrepr=repr(code_object_f), offset=14, start_offset=14, starts_line=False, line_number=2, label=None, positions=None, 
cache_info=None), - Instruction(opname='MAKE_FUNCTION', opcode=23, arg=None, argval=None, argrepr='', offset=16, start_offset=16, starts_line=False, line_number=2, label=None, positions=None, cache_info=None), - Instruction(opname='SET_FUNCTION_ATTRIBUTE', opcode=104, arg=8, argval=8, argrepr='closure', offset=18, start_offset=18, starts_line=False, line_number=2, label=None, positions=None, cache_info=None), - Instruction(opname='SET_FUNCTION_ATTRIBUTE', opcode=104, arg=1, argval=1, argrepr='defaults', offset=20, start_offset=20, starts_line=False, line_number=2, label=None, positions=None, cache_info=None), - Instruction(opname='STORE_FAST', opcode=108, arg=2, argval='f', argrepr='f', offset=22, start_offset=22, starts_line=False, line_number=2, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_GLOBAL', opcode=88, arg=1, argval='print', argrepr='print + NULL', offset=24, start_offset=24, starts_line=True, line_number=7, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]), - Instruction(opname='LOAD_DEREF', opcode=81, arg=0, argval='a', argrepr='a', offset=34, start_offset=34, starts_line=False, line_number=7, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_DEREF', opcode=81, arg=1, argval='b', argrepr='b', offset=36, start_offset=36, starts_line=False, line_number=7, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_CONST', opcode=80, arg=1, argval='', argrepr="''", offset=38, start_offset=38, starts_line=False, line_number=7, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_SMALL_INT', opcode=90, arg=1, argval=1, argrepr='', offset=40, start_offset=40, starts_line=False, line_number=7, label=None, positions=None, cache_info=None), - Instruction(opname='BUILD_LIST', opcode=44, arg=0, argval=0, argrepr='', offset=42, start_offset=42, starts_line=False, line_number=7, label=None, positions=None, cache_info=None), - Instruction(opname='BUILD_MAP', opcode=45, arg=0, argval=0, argrepr='', offset=44, start_offset=44, starts_line=False, line_number=7, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_CONST', opcode=80, arg=2, argval='Hello world!', argrepr="'Hello world!'", offset=46, start_offset=46, starts_line=False, line_number=7, label=None, positions=None, cache_info=None), - Instruction(opname='CALL', opcode=50, arg=7, argval=7, argrepr='', offset=48, start_offset=48, starts_line=False, line_number=7, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]), - Instruction(opname='POP_TOP', opcode=30, arg=None, argval=None, argrepr='', offset=56, start_offset=56, starts_line=False, line_number=7, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_FAST', opcode=82, arg=2, argval='f', argrepr='f', offset=58, start_offset=58, starts_line=True, line_number=8, label=None, positions=None, cache_info=None), - Instruction(opname='RETURN_VALUE', opcode=34, arg=None, argval=None, argrepr='', offset=60, start_offset=60, starts_line=False, line_number=8, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_CONST', opcode=81, arg=3, argval=(3, 4), argrepr='(3, 4)', offset=6, start_offset=6, starts_line=True, line_number=2, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_FAST', opcode=83, arg=0, argval='a', argrepr='a', offset=8, 
start_offset=8, starts_line=False, line_number=2, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_FAST', opcode=83, arg=1, argval='b', argrepr='b', offset=10, start_offset=10, starts_line=False, line_number=2, label=None, positions=None, cache_info=None), + Instruction(opname='BUILD_TUPLE', opcode=51, arg=2, argval=2, argrepr='', offset=12, start_offset=12, starts_line=False, line_number=2, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_CONST', opcode=81, arg=0, argval=code_object_f, argrepr=repr(code_object_f), offset=14, start_offset=14, starts_line=False, line_number=2, label=None, positions=None, cache_info=None), + Instruction(opname='MAKE_FUNCTION', opcode=24, arg=None, argval=None, argrepr='', offset=16, start_offset=16, starts_line=False, line_number=2, label=None, positions=None, cache_info=None), + Instruction(opname='SET_FUNCTION_ATTRIBUTE', opcode=105, arg=8, argval=8, argrepr='closure', offset=18, start_offset=18, starts_line=False, line_number=2, label=None, positions=None, cache_info=None), + Instruction(opname='SET_FUNCTION_ATTRIBUTE', opcode=105, arg=1, argval=1, argrepr='defaults', offset=20, start_offset=20, starts_line=False, line_number=2, label=None, positions=None, cache_info=None), + Instruction(opname='STORE_FAST', opcode=109, arg=2, argval='f', argrepr='f', offset=22, start_offset=22, starts_line=False, line_number=2, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_GLOBAL', opcode=89, arg=1, argval='print', argrepr='print + NULL', offset=24, start_offset=24, starts_line=True, line_number=7, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]), + Instruction(opname='LOAD_DEREF', opcode=82, arg=0, argval='a', argrepr='a', offset=34, start_offset=34, starts_line=False, line_number=7, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_DEREF', opcode=82, arg=1, argval='b', argrepr='b', offset=36, start_offset=36, starts_line=False, line_number=7, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_CONST', opcode=81, arg=1, argval='', argrepr="''", offset=38, start_offset=38, starts_line=False, line_number=7, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_SMALL_INT', opcode=91, arg=1, argval=1, argrepr='', offset=40, start_offset=40, starts_line=False, line_number=7, label=None, positions=None, cache_info=None), + Instruction(opname='BUILD_LIST', opcode=46, arg=0, argval=0, argrepr='', offset=42, start_offset=42, starts_line=False, line_number=7, label=None, positions=None, cache_info=None), + Instruction(opname='BUILD_MAP', opcode=47, arg=0, argval=0, argrepr='', offset=44, start_offset=44, starts_line=False, line_number=7, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_CONST', opcode=81, arg=2, argval='Hello world!', argrepr="'Hello world!'", offset=46, start_offset=46, starts_line=False, line_number=7, label=None, positions=None, cache_info=None), + Instruction(opname='CALL', opcode=52, arg=7, argval=7, argrepr='', offset=48, start_offset=48, starts_line=False, line_number=7, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]), + Instruction(opname='POP_TOP', opcode=32, arg=None, argval=None, argrepr='', offset=56, start_offset=56, starts_line=False, line_number=7, label=None, positions=None, cache_info=None), + 
Instruction(opname='LOAD_FAST', opcode=83, arg=2, argval='f', argrepr='f', offset=58, start_offset=58, starts_line=True, line_number=8, label=None, positions=None, cache_info=None), + Instruction(opname='RETURN_VALUE', opcode=36, arg=None, argval=None, argrepr='', offset=60, start_offset=60, starts_line=False, line_number=8, label=None, positions=None, cache_info=None), ] expected_opinfo_f = [ - Instruction(opname='COPY_FREE_VARS', opcode=59, arg=2, argval=2, argrepr='', offset=0, start_offset=0, starts_line=True, line_number=None, label=None, positions=None, cache_info=None), - Instruction(opname='MAKE_CELL', opcode=93, arg=0, argval='c', argrepr='c', offset=2, start_offset=2, starts_line=False, line_number=None, label=None, positions=None, cache_info=None), - Instruction(opname='MAKE_CELL', opcode=93, arg=1, argval='d', argrepr='d', offset=4, start_offset=4, starts_line=False, line_number=None, label=None, positions=None, cache_info=None), + Instruction(opname='COPY_FREE_VARS', opcode=60, arg=2, argval=2, argrepr='', offset=0, start_offset=0, starts_line=True, line_number=None, label=None, positions=None, cache_info=None), + Instruction(opname='MAKE_CELL', opcode=94, arg=0, argval='c', argrepr='c', offset=2, start_offset=2, starts_line=False, line_number=None, label=None, positions=None, cache_info=None), + Instruction(opname='MAKE_CELL', opcode=94, arg=1, argval='d', argrepr='d', offset=4, start_offset=4, starts_line=False, line_number=None, label=None, positions=None, cache_info=None), Instruction(opname='RESUME', opcode=149, arg=0, argval=0, argrepr='', offset=6, start_offset=6, starts_line=True, line_number=2, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_CONST', opcode=80, arg=1, argval=(5, 6), argrepr='(5, 6)', offset=8, start_offset=8, starts_line=True, line_number=3, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_FAST', opcode=82, arg=3, argval='a', argrepr='a', offset=10, start_offset=10, starts_line=False, line_number=3, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_FAST', opcode=82, arg=4, argval='b', argrepr='b', offset=12, start_offset=12, starts_line=False, line_number=3, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_FAST', opcode=82, arg=0, argval='c', argrepr='c', offset=14, start_offset=14, starts_line=False, line_number=3, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_FAST', opcode=82, arg=1, argval='d', argrepr='d', offset=16, start_offset=16, starts_line=False, line_number=3, label=None, positions=None, cache_info=None), - Instruction(opname='BUILD_TUPLE', opcode=49, arg=4, argval=4, argrepr='', offset=18, start_offset=18, starts_line=False, line_number=3, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_CONST', opcode=80, arg=0, argval=code_object_inner, argrepr=repr(code_object_inner), offset=20, start_offset=20, starts_line=False, line_number=3, label=None, positions=None, cache_info=None), - Instruction(opname='MAKE_FUNCTION', opcode=23, arg=None, argval=None, argrepr='', offset=22, start_offset=22, starts_line=False, line_number=3, label=None, positions=None, cache_info=None), - Instruction(opname='SET_FUNCTION_ATTRIBUTE', opcode=104, arg=8, argval=8, argrepr='closure', offset=24, start_offset=24, starts_line=False, line_number=3, label=None, positions=None, cache_info=None), - Instruction(opname='SET_FUNCTION_ATTRIBUTE', opcode=104, arg=1, argval=1, argrepr='defaults', offset=26, start_offset=26, 
starts_line=False, line_number=3, label=None, positions=None, cache_info=None), - Instruction(opname='STORE_FAST', opcode=108, arg=2, argval='inner', argrepr='inner', offset=28, start_offset=28, starts_line=False, line_number=3, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_GLOBAL', opcode=88, arg=1, argval='print', argrepr='print + NULL', offset=30, start_offset=30, starts_line=True, line_number=5, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]), - Instruction(opname='LOAD_DEREF', opcode=81, arg=3, argval='a', argrepr='a', offset=40, start_offset=40, starts_line=False, line_number=5, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_DEREF', opcode=81, arg=4, argval='b', argrepr='b', offset=42, start_offset=42, starts_line=False, line_number=5, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_DEREF', opcode=81, arg=0, argval='c', argrepr='c', offset=44, start_offset=44, starts_line=False, line_number=5, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_DEREF', opcode=81, arg=1, argval='d', argrepr='d', offset=46, start_offset=46, starts_line=False, line_number=5, label=None, positions=None, cache_info=None), - Instruction(opname='CALL', opcode=50, arg=4, argval=4, argrepr='', offset=48, start_offset=48, starts_line=False, line_number=5, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]), - Instruction(opname='POP_TOP', opcode=30, arg=None, argval=None, argrepr='', offset=56, start_offset=56, starts_line=False, line_number=5, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_FAST', opcode=82, arg=2, argval='inner', argrepr='inner', offset=58, start_offset=58, starts_line=True, line_number=6, label=None, positions=None, cache_info=None), - Instruction(opname='RETURN_VALUE', opcode=34, arg=None, argval=None, argrepr='', offset=60, start_offset=60, starts_line=False, line_number=6, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_CONST', opcode=81, arg=1, argval=(5, 6), argrepr='(5, 6)', offset=8, start_offset=8, starts_line=True, line_number=3, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_FAST', opcode=83, arg=3, argval='a', argrepr='a', offset=10, start_offset=10, starts_line=False, line_number=3, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_FAST', opcode=83, arg=4, argval='b', argrepr='b', offset=12, start_offset=12, starts_line=False, line_number=3, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_FAST', opcode=83, arg=0, argval='c', argrepr='c', offset=14, start_offset=14, starts_line=False, line_number=3, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_FAST', opcode=83, arg=1, argval='d', argrepr='d', offset=16, start_offset=16, starts_line=False, line_number=3, label=None, positions=None, cache_info=None), + Instruction(opname='BUILD_TUPLE', opcode=51, arg=4, argval=4, argrepr='', offset=18, start_offset=18, starts_line=False, line_number=3, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_CONST', opcode=81, arg=0, argval=code_object_inner, argrepr=repr(code_object_inner), offset=20, start_offset=20, starts_line=False, line_number=3, label=None, positions=None, cache_info=None), + Instruction(opname='MAKE_FUNCTION', opcode=24, 
arg=None, argval=None, argrepr='', offset=22, start_offset=22, starts_line=False, line_number=3, label=None, positions=None, cache_info=None), + Instruction(opname='SET_FUNCTION_ATTRIBUTE', opcode=105, arg=8, argval=8, argrepr='closure', offset=24, start_offset=24, starts_line=False, line_number=3, label=None, positions=None, cache_info=None), + Instruction(opname='SET_FUNCTION_ATTRIBUTE', opcode=105, arg=1, argval=1, argrepr='defaults', offset=26, start_offset=26, starts_line=False, line_number=3, label=None, positions=None, cache_info=None), + Instruction(opname='STORE_FAST', opcode=109, arg=2, argval='inner', argrepr='inner', offset=28, start_offset=28, starts_line=False, line_number=3, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_GLOBAL', opcode=89, arg=1, argval='print', argrepr='print + NULL', offset=30, start_offset=30, starts_line=True, line_number=5, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]), + Instruction(opname='LOAD_DEREF', opcode=82, arg=3, argval='a', argrepr='a', offset=40, start_offset=40, starts_line=False, line_number=5, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_DEREF', opcode=82, arg=4, argval='b', argrepr='b', offset=42, start_offset=42, starts_line=False, line_number=5, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_DEREF', opcode=82, arg=0, argval='c', argrepr='c', offset=44, start_offset=44, starts_line=False, line_number=5, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_DEREF', opcode=82, arg=1, argval='d', argrepr='d', offset=46, start_offset=46, starts_line=False, line_number=5, label=None, positions=None, cache_info=None), + Instruction(opname='CALL', opcode=52, arg=4, argval=4, argrepr='', offset=48, start_offset=48, starts_line=False, line_number=5, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]), + Instruction(opname='POP_TOP', opcode=32, arg=None, argval=None, argrepr='', offset=56, start_offset=56, starts_line=False, line_number=5, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_FAST', opcode=83, arg=2, argval='inner', argrepr='inner', offset=58, start_offset=58, starts_line=True, line_number=6, label=None, positions=None, cache_info=None), + Instruction(opname='RETURN_VALUE', opcode=36, arg=None, argval=None, argrepr='', offset=60, start_offset=60, starts_line=False, line_number=6, label=None, positions=None, cache_info=None), ] expected_opinfo_inner = [ - Instruction(opname='COPY_FREE_VARS', opcode=59, arg=4, argval=4, argrepr='', offset=0, start_offset=0, starts_line=True, line_number=None, label=None, positions=None, cache_info=None), + Instruction(opname='COPY_FREE_VARS', opcode=60, arg=4, argval=4, argrepr='', offset=0, start_offset=0, starts_line=True, line_number=None, label=None, positions=None, cache_info=None), Instruction(opname='RESUME', opcode=149, arg=0, argval=0, argrepr='', offset=2, start_offset=2, starts_line=True, line_number=3, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_GLOBAL', opcode=88, arg=1, argval='print', argrepr='print + NULL', offset=4, start_offset=4, starts_line=True, line_number=4, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]), - 
Instruction(opname='LOAD_DEREF', opcode=81, arg=2, argval='a', argrepr='a', offset=14, start_offset=14, starts_line=False, line_number=4, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_DEREF', opcode=81, arg=3, argval='b', argrepr='b', offset=16, start_offset=16, starts_line=False, line_number=4, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_DEREF', opcode=81, arg=4, argval='c', argrepr='c', offset=18, start_offset=18, starts_line=False, line_number=4, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_DEREF', opcode=81, arg=5, argval='d', argrepr='d', offset=20, start_offset=20, starts_line=False, line_number=4, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_FAST_LOAD_FAST', opcode=85, arg=1, argval=('e', 'f'), argrepr='e, f', offset=22, start_offset=22, starts_line=False, line_number=4, label=None, positions=None, cache_info=None), - Instruction(opname='CALL', opcode=50, arg=6, argval=6, argrepr='', offset=24, start_offset=24, starts_line=False, line_number=4, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]), - Instruction(opname='POP_TOP', opcode=30, arg=None, argval=None, argrepr='', offset=32, start_offset=32, starts_line=False, line_number=4, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_CONST', opcode=80, arg=0, argval=None, argrepr='None', offset=34, start_offset=34, starts_line=False, line_number=4, label=None, positions=None, cache_info=None), - Instruction(opname='RETURN_VALUE', opcode=34, arg=None, argval=None, argrepr='', offset=36, start_offset=36, starts_line=False, line_number=4, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_GLOBAL', opcode=89, arg=1, argval='print', argrepr='print + NULL', offset=4, start_offset=4, starts_line=True, line_number=4, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]), + Instruction(opname='LOAD_DEREF', opcode=82, arg=2, argval='a', argrepr='a', offset=14, start_offset=14, starts_line=False, line_number=4, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_DEREF', opcode=82, arg=3, argval='b', argrepr='b', offset=16, start_offset=16, starts_line=False, line_number=4, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_DEREF', opcode=82, arg=4, argval='c', argrepr='c', offset=18, start_offset=18, starts_line=False, line_number=4, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_DEREF', opcode=82, arg=5, argval='d', argrepr='d', offset=20, start_offset=20, starts_line=False, line_number=4, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_FAST_LOAD_FAST', opcode=86, arg=1, argval=('e', 'f'), argrepr='e, f', offset=22, start_offset=22, starts_line=False, line_number=4, label=None, positions=None, cache_info=None), + Instruction(opname='CALL', opcode=52, arg=6, argval=6, argrepr='', offset=24, start_offset=24, starts_line=False, line_number=4, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]), + Instruction(opname='POP_TOP', opcode=32, arg=None, argval=None, argrepr='', offset=32, start_offset=32, starts_line=False, line_number=4, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_CONST', opcode=81, arg=0, argval=None, argrepr='None', 
offset=34, start_offset=34, starts_line=False, line_number=4, label=None, positions=None, cache_info=None), + Instruction(opname='RETURN_VALUE', opcode=36, arg=None, argval=None, argrepr='', offset=36, start_offset=36, starts_line=False, line_number=4, label=None, positions=None, cache_info=None), ] expected_opinfo_jumpy = [ Instruction(opname='RESUME', opcode=149, arg=0, argval=0, argrepr='', offset=0, start_offset=0, starts_line=True, line_number=1, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_GLOBAL', opcode=88, arg=1, argval='range', argrepr='range + NULL', offset=2, start_offset=2, starts_line=True, line_number=3, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]), - Instruction(opname='LOAD_SMALL_INT', opcode=90, arg=10, argval=10, argrepr='', offset=12, start_offset=12, starts_line=False, line_number=3, label=None, positions=None, cache_info=None), - Instruction(opname='CALL', opcode=50, arg=1, argval=1, argrepr='', offset=14, start_offset=14, starts_line=False, line_number=3, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]), - Instruction(opname='GET_ITER', opcode=16, arg=None, argval=None, argrepr='', offset=22, start_offset=22, starts_line=False, line_number=3, label=None, positions=None, cache_info=None), - Instruction(opname='FOR_ITER', opcode=68, arg=34, argval=96, argrepr='to L4', offset=24, start_offset=24, starts_line=False, line_number=3, label=1, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), - Instruction(opname='NOT_TAKEN', opcode=28, arg=None, argval=None, argrepr='', offset=28, start_offset=28, starts_line=False, line_number=3, label=None, positions=None, cache_info=None), - Instruction(opname='STORE_FAST', opcode=108, arg=0, argval='i', argrepr='i', offset=30, start_offset=30, starts_line=False, line_number=3, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_GLOBAL', opcode=88, arg=3, argval='print', argrepr='print + NULL', offset=32, start_offset=32, starts_line=True, line_number=4, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]), - Instruction(opname='LOAD_FAST', opcode=82, arg=0, argval='i', argrepr='i', offset=42, start_offset=42, starts_line=False, line_number=4, label=None, positions=None, cache_info=None), - Instruction(opname='CALL', opcode=50, arg=1, argval=1, argrepr='', offset=44, start_offset=44, starts_line=False, line_number=4, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]), - Instruction(opname='POP_TOP', opcode=30, arg=None, argval=None, argrepr='', offset=52, start_offset=52, starts_line=False, line_number=4, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_FAST', opcode=82, arg=0, argval='i', argrepr='i', offset=54, start_offset=54, starts_line=True, line_number=5, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_SMALL_INT', opcode=90, arg=4, argval=4, argrepr='', offset=56, start_offset=56, starts_line=False, line_number=5, label=None, positions=None, cache_info=None), - Instruction(opname='COMPARE_OP', opcode=55, arg=18, argval='<', argrepr='bool(<)', offset=58, start_offset=58, starts_line=False, line_number=5, label=None, positions=None, 
cache_info=[('counter', 1, b'\x00\x00')]), - Instruction(opname='POP_JUMP_IF_FALSE', opcode=96, arg=3, argval=72, argrepr='to L2', offset=62, start_offset=62, starts_line=False, line_number=5, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), - Instruction(opname='NOT_TAKEN', opcode=28, arg=None, argval=None, argrepr='', offset=66, start_offset=66, starts_line=False, line_number=5, label=None, positions=None, cache_info=None), - Instruction(opname='JUMP_BACKWARD', opcode=73, arg=24, argval=24, argrepr='to L1', offset=68, start_offset=68, starts_line=True, line_number=6, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), - Instruction(opname='LOAD_FAST', opcode=82, arg=0, argval='i', argrepr='i', offset=72, start_offset=72, starts_line=True, line_number=7, label=2, positions=None, cache_info=None), - Instruction(opname='LOAD_SMALL_INT', opcode=90, arg=6, argval=6, argrepr='', offset=74, start_offset=74, starts_line=False, line_number=7, label=None, positions=None, cache_info=None), - Instruction(opname='COMPARE_OP', opcode=55, arg=148, argval='>', argrepr='bool(>)', offset=76, start_offset=76, starts_line=False, line_number=7, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), - Instruction(opname='POP_JUMP_IF_TRUE', opcode=99, arg=3, argval=90, argrepr='to L3', offset=80, start_offset=80, starts_line=False, line_number=7, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), - Instruction(opname='NOT_TAKEN', opcode=28, arg=None, argval=None, argrepr='', offset=84, start_offset=84, starts_line=False, line_number=7, label=None, positions=None, cache_info=None), - Instruction(opname='JUMP_BACKWARD', opcode=73, arg=33, argval=24, argrepr='to L1', offset=86, start_offset=86, starts_line=False, line_number=7, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), - Instruction(opname='NOP', opcode=27, arg=None, argval=None, argrepr='', offset=90, start_offset=90, starts_line=True, line_number=None, label=3, positions=None, cache_info=None), - Instruction(opname='POP_TOP', opcode=30, arg=None, argval=None, argrepr='', offset=92, start_offset=92, starts_line=True, line_number=8, label=None, positions=None, cache_info=None), - Instruction(opname='JUMP_FORWARD', opcode=75, arg=13, argval=122, argrepr='to L5', offset=94, start_offset=94, starts_line=False, line_number=8, label=None, positions=None, cache_info=None), - Instruction(opname='END_FOR', opcode=9, arg=None, argval=None, argrepr='', offset=96, start_offset=96, starts_line=True, line_number=3, label=4, positions=None, cache_info=None), - Instruction(opname='POP_TOP', opcode=30, arg=None, argval=None, argrepr='', offset=98, start_offset=98, starts_line=False, line_number=3, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_GLOBAL', opcode=88, arg=3, argval='print', argrepr='print + NULL', offset=100, start_offset=100, starts_line=True, line_number=10, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]), - Instruction(opname='LOAD_CONST', opcode=80, arg=0, argval='I can haz else clause?', argrepr="'I can haz else clause?'", offset=110, start_offset=110, starts_line=False, line_number=10, label=None, positions=None, cache_info=None), - Instruction(opname='CALL', opcode=50, arg=1, argval=1, argrepr='', offset=112, start_offset=112, starts_line=False, line_number=10, label=None, positions=None, 
cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]), - Instruction(opname='POP_TOP', opcode=30, arg=None, argval=None, argrepr='', offset=120, start_offset=120, starts_line=False, line_number=10, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_FAST_CHECK', opcode=84, arg=0, argval='i', argrepr='i', offset=122, start_offset=122, starts_line=True, line_number=11, label=5, positions=None, cache_info=None), - Instruction(opname='TO_BOOL', opcode=38, arg=None, argval=None, argrepr='', offset=124, start_offset=124, starts_line=False, line_number=11, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('version', 2, b'\x00\x00\x00\x00')]), - Instruction(opname='POP_JUMP_IF_FALSE', opcode=96, arg=37, argval=210, argrepr='to L8', offset=132, start_offset=132, starts_line=False, line_number=11, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), - Instruction(opname='NOT_TAKEN', opcode=28, arg=None, argval=None, argrepr='', offset=136, start_offset=136, starts_line=False, line_number=11, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_GLOBAL', opcode=88, arg=3, argval='print', argrepr='print + NULL', offset=138, start_offset=138, starts_line=True, line_number=12, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]), - Instruction(opname='LOAD_FAST', opcode=82, arg=0, argval='i', argrepr='i', offset=148, start_offset=148, starts_line=False, line_number=12, label=None, positions=None, cache_info=None), - Instruction(opname='CALL', opcode=50, arg=1, argval=1, argrepr='', offset=150, start_offset=150, starts_line=False, line_number=12, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]), - Instruction(opname='POP_TOP', opcode=30, arg=None, argval=None, argrepr='', offset=158, start_offset=158, starts_line=False, line_number=12, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_FAST', opcode=82, arg=0, argval='i', argrepr='i', offset=160, start_offset=160, starts_line=True, line_number=13, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_SMALL_INT', opcode=90, arg=1, argval=1, argrepr='', offset=162, start_offset=162, starts_line=False, line_number=13, label=None, positions=None, cache_info=None), - Instruction(opname='BINARY_OP', opcode=43, arg=23, argval=23, argrepr='-=', offset=164, start_offset=164, starts_line=False, line_number=13, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), - Instruction(opname='STORE_FAST', opcode=108, arg=0, argval='i', argrepr='i', offset=168, start_offset=168, starts_line=False, line_number=13, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_FAST', opcode=82, arg=0, argval='i', argrepr='i', offset=170, start_offset=170, starts_line=True, line_number=14, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_SMALL_INT', opcode=90, arg=6, argval=6, argrepr='', offset=172, start_offset=172, starts_line=False, line_number=14, label=None, positions=None, cache_info=None), - Instruction(opname='COMPARE_OP', opcode=55, arg=148, argval='>', argrepr='bool(>)', offset=174, start_offset=174, starts_line=False, line_number=14, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), - Instruction(opname='POP_JUMP_IF_FALSE', opcode=96, arg=3, argval=188, 
argrepr='to L6', offset=178, start_offset=178, starts_line=False, line_number=14, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), - Instruction(opname='NOT_TAKEN', opcode=28, arg=None, argval=None, argrepr='', offset=182, start_offset=182, starts_line=False, line_number=14, label=None, positions=None, cache_info=None), - Instruction(opname='JUMP_BACKWARD', opcode=73, arg=33, argval=122, argrepr='to L5', offset=184, start_offset=184, starts_line=True, line_number=15, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), - Instruction(opname='LOAD_FAST', opcode=82, arg=0, argval='i', argrepr='i', offset=188, start_offset=188, starts_line=True, line_number=16, label=6, positions=None, cache_info=None), - Instruction(opname='LOAD_SMALL_INT', opcode=90, arg=4, argval=4, argrepr='', offset=190, start_offset=190, starts_line=False, line_number=16, label=None, positions=None, cache_info=None), - Instruction(opname='COMPARE_OP', opcode=55, arg=18, argval='<', argrepr='bool(<)', offset=192, start_offset=192, starts_line=False, line_number=16, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), - Instruction(opname='POP_JUMP_IF_TRUE', opcode=99, arg=3, argval=206, argrepr='to L7', offset=196, start_offset=196, starts_line=False, line_number=16, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), - Instruction(opname='NOT_TAKEN', opcode=28, arg=None, argval=None, argrepr='', offset=200, start_offset=200, starts_line=False, line_number=16, label=None, positions=None, cache_info=None), - Instruction(opname='JUMP_BACKWARD', opcode=73, arg=42, argval=122, argrepr='to L5', offset=202, start_offset=202, starts_line=False, line_number=16, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), - Instruction(opname='NOP', opcode=27, arg=None, argval=None, argrepr='', offset=206, start_offset=206, starts_line=True, line_number=None, label=7, positions=None, cache_info=None), - Instruction(opname='JUMP_FORWARD', opcode=75, arg=11, argval=232, argrepr='to L9', offset=208, start_offset=208, starts_line=True, line_number=17, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_GLOBAL', opcode=88, arg=3, argval='print', argrepr='print + NULL', offset=210, start_offset=210, starts_line=True, line_number=19, label=8, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]), - Instruction(opname='LOAD_CONST', opcode=80, arg=1, argval='Who let lolcatz into this test suite?', argrepr="'Who let lolcatz into this test suite?'", offset=220, start_offset=220, starts_line=False, line_number=19, label=None, positions=None, cache_info=None), - Instruction(opname='CALL', opcode=50, arg=1, argval=1, argrepr='', offset=222, start_offset=222, starts_line=False, line_number=19, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]), - Instruction(opname='POP_TOP', opcode=30, arg=None, argval=None, argrepr='', offset=230, start_offset=230, starts_line=False, line_number=19, label=None, positions=None, cache_info=None), - Instruction(opname='NOP', opcode=27, arg=None, argval=None, argrepr='', offset=232, start_offset=232, starts_line=True, line_number=20, label=9, positions=None, cache_info=None), - Instruction(opname='LOAD_SMALL_INT', opcode=90, arg=1, argval=1, argrepr='', offset=234, start_offset=234, starts_line=True, line_number=21, label=None, 
positions=None, cache_info=None), - Instruction(opname='LOAD_SMALL_INT', opcode=90, arg=0, argval=0, argrepr='', offset=236, start_offset=236, starts_line=False, line_number=21, label=None, positions=None, cache_info=None), - Instruction(opname='BINARY_OP', opcode=43, arg=11, argval=11, argrepr='/', offset=238, start_offset=238, starts_line=False, line_number=21, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), - Instruction(opname='POP_TOP', opcode=30, arg=None, argval=None, argrepr='', offset=242, start_offset=242, starts_line=False, line_number=21, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_FAST', opcode=82, arg=0, argval='i', argrepr='i', offset=244, start_offset=244, starts_line=True, line_number=25, label=None, positions=None, cache_info=None), - Instruction(opname='COPY', opcode=58, arg=1, argval=1, argrepr='', offset=246, start_offset=246, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_SPECIAL', opcode=91, arg=1, argval=1, argrepr='__exit__', offset=248, start_offset=248, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), - Instruction(opname='SWAP', opcode=113, arg=2, argval=2, argrepr='', offset=250, start_offset=250, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), - Instruction(opname='SWAP', opcode=113, arg=3, argval=3, argrepr='', offset=252, start_offset=252, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_SPECIAL', opcode=91, arg=0, argval=0, argrepr='__enter__', offset=254, start_offset=254, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), - Instruction(opname='CALL', opcode=50, arg=0, argval=0, argrepr='', offset=256, start_offset=256, starts_line=False, line_number=25, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]), - Instruction(opname='STORE_FAST', opcode=108, arg=1, argval='dodgy', argrepr='dodgy', offset=264, start_offset=264, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_GLOBAL', opcode=88, arg=3, argval='print', argrepr='print + NULL', offset=266, start_offset=266, starts_line=True, line_number=26, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]), - Instruction(opname='LOAD_CONST', opcode=80, arg=2, argval='Never reach this', argrepr="'Never reach this'", offset=276, start_offset=276, starts_line=False, line_number=26, label=None, positions=None, cache_info=None), - Instruction(opname='CALL', opcode=50, arg=1, argval=1, argrepr='', offset=278, start_offset=278, starts_line=False, line_number=26, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]), - Instruction(opname='POP_TOP', opcode=30, arg=None, argval=None, argrepr='', offset=286, start_offset=286, starts_line=False, line_number=26, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_CONST', opcode=80, arg=3, argval=None, argrepr='None', offset=288, start_offset=288, starts_line=True, line_number=25, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_CONST', opcode=80, arg=3, argval=None, argrepr='None', offset=290, start_offset=290, starts_line=False, line_number=25, label=None, 
positions=None, cache_info=None), - Instruction(opname='LOAD_CONST', opcode=80, arg=3, argval=None, argrepr='None', offset=292, start_offset=292, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), - Instruction(opname='CALL', opcode=50, arg=3, argval=3, argrepr='', offset=294, start_offset=294, starts_line=False, line_number=25, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]), - Instruction(opname='POP_TOP', opcode=30, arg=None, argval=None, argrepr='', offset=302, start_offset=302, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_GLOBAL', opcode=88, arg=3, argval='print', argrepr='print + NULL', offset=304, start_offset=304, starts_line=True, line_number=28, label=10, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]), - Instruction(opname='LOAD_CONST', opcode=80, arg=5, argval="OK, now we're done", argrepr='"OK, now we\'re done"', offset=314, start_offset=314, starts_line=False, line_number=28, label=None, positions=None, cache_info=None), - Instruction(opname='CALL', opcode=50, arg=1, argval=1, argrepr='', offset=316, start_offset=316, starts_line=False, line_number=28, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]), - Instruction(opname='POP_TOP', opcode=30, arg=None, argval=None, argrepr='', offset=324, start_offset=324, starts_line=False, line_number=28, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_CONST', opcode=80, arg=3, argval=None, argrepr='None', offset=326, start_offset=326, starts_line=False, line_number=28, label=None, positions=None, cache_info=None), - Instruction(opname='RETURN_VALUE', opcode=34, arg=None, argval=None, argrepr='', offset=328, start_offset=328, starts_line=False, line_number=28, label=None, positions=None, cache_info=None), - Instruction(opname='PUSH_EXC_INFO', opcode=31, arg=None, argval=None, argrepr='', offset=330, start_offset=330, starts_line=True, line_number=25, label=None, positions=None, cache_info=None), - Instruction(opname='WITH_EXCEPT_START', opcode=42, arg=None, argval=None, argrepr='', offset=332, start_offset=332, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), - Instruction(opname='TO_BOOL', opcode=38, arg=None, argval=None, argrepr='', offset=334, start_offset=334, starts_line=False, line_number=25, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('version', 2, b'\x00\x00\x00\x00')]), - Instruction(opname='POP_JUMP_IF_TRUE', opcode=99, arg=2, argval=350, argrepr='to L11', offset=342, start_offset=342, starts_line=False, line_number=25, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), - Instruction(opname='NOT_TAKEN', opcode=28, arg=None, argval=None, argrepr='', offset=346, start_offset=346, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), - Instruction(opname='RERAISE', opcode=101, arg=2, argval=2, argrepr='', offset=348, start_offset=348, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), - Instruction(opname='POP_TOP', opcode=30, arg=None, argval=None, argrepr='', offset=350, start_offset=350, starts_line=False, line_number=25, label=11, positions=None, cache_info=None), - Instruction(opname='POP_EXCEPT', opcode=29, arg=None, argval=None, 
argrepr='', offset=352, start_offset=352, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), - Instruction(opname='POP_TOP', opcode=30, arg=None, argval=None, argrepr='', offset=354, start_offset=354, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), - Instruction(opname='POP_TOP', opcode=30, arg=None, argval=None, argrepr='', offset=356, start_offset=356, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), - Instruction(opname='POP_TOP', opcode=30, arg=None, argval=None, argrepr='', offset=358, start_offset=358, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), - Instruction(opname='JUMP_BACKWARD_NO_INTERRUPT', opcode=74, arg=29, argval=304, argrepr='to L10', offset=360, start_offset=360, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), - Instruction(opname='COPY', opcode=58, arg=3, argval=3, argrepr='', offset=362, start_offset=362, starts_line=True, line_number=None, label=None, positions=None, cache_info=None), - Instruction(opname='POP_EXCEPT', opcode=29, arg=None, argval=None, argrepr='', offset=364, start_offset=364, starts_line=False, line_number=None, label=None, positions=None, cache_info=None), - Instruction(opname='RERAISE', opcode=101, arg=1, argval=1, argrepr='', offset=366, start_offset=366, starts_line=False, line_number=None, label=None, positions=None, cache_info=None), - Instruction(opname='PUSH_EXC_INFO', opcode=31, arg=None, argval=None, argrepr='', offset=368, start_offset=368, starts_line=False, line_number=None, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_GLOBAL', opcode=88, arg=4, argval='ZeroDivisionError', argrepr='ZeroDivisionError', offset=370, start_offset=370, starts_line=True, line_number=22, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]), - Instruction(opname='CHECK_EXC_MATCH', opcode=5, arg=None, argval=None, argrepr='', offset=380, start_offset=380, starts_line=False, line_number=22, label=None, positions=None, cache_info=None), - Instruction(opname='POP_JUMP_IF_FALSE', opcode=96, arg=15, argval=416, argrepr='to L12', offset=382, start_offset=382, starts_line=False, line_number=22, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), - Instruction(opname='NOT_TAKEN', opcode=28, arg=None, argval=None, argrepr='', offset=386, start_offset=386, starts_line=False, line_number=22, label=None, positions=None, cache_info=None), - Instruction(opname='POP_TOP', opcode=30, arg=None, argval=None, argrepr='', offset=388, start_offset=388, starts_line=False, line_number=22, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_GLOBAL', opcode=88, arg=3, argval='print', argrepr='print + NULL', offset=390, start_offset=390, starts_line=True, line_number=23, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]), - Instruction(opname='LOAD_CONST', opcode=80, arg=4, argval='Here we go, here we go, here we go...', argrepr="'Here we go, here we go, here we go...'", offset=400, start_offset=400, starts_line=False, line_number=23, label=None, positions=None, cache_info=None), - Instruction(opname='CALL', opcode=50, arg=1, argval=1, argrepr='', offset=402, start_offset=402, starts_line=False, line_number=23, 
label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]), - Instruction(opname='POP_TOP', opcode=30, arg=None, argval=None, argrepr='', offset=410, start_offset=410, starts_line=False, line_number=23, label=None, positions=None, cache_info=None), - Instruction(opname='POP_EXCEPT', opcode=29, arg=None, argval=None, argrepr='', offset=412, start_offset=412, starts_line=False, line_number=23, label=None, positions=None, cache_info=None), - Instruction(opname='JUMP_BACKWARD_NO_INTERRUPT', opcode=74, arg=56, argval=304, argrepr='to L10', offset=414, start_offset=414, starts_line=False, line_number=23, label=None, positions=None, cache_info=None), - Instruction(opname='RERAISE', opcode=101, arg=0, argval=0, argrepr='', offset=416, start_offset=416, starts_line=True, line_number=22, label=12, positions=None, cache_info=None), - Instruction(opname='COPY', opcode=58, arg=3, argval=3, argrepr='', offset=418, start_offset=418, starts_line=True, line_number=None, label=None, positions=None, cache_info=None), - Instruction(opname='POP_EXCEPT', opcode=29, arg=None, argval=None, argrepr='', offset=420, start_offset=420, starts_line=False, line_number=None, label=None, positions=None, cache_info=None), - Instruction(opname='RERAISE', opcode=101, arg=1, argval=1, argrepr='', offset=422, start_offset=422, starts_line=False, line_number=None, label=None, positions=None, cache_info=None), - Instruction(opname='PUSH_EXC_INFO', opcode=31, arg=None, argval=None, argrepr='', offset=424, start_offset=424, starts_line=False, line_number=None, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_GLOBAL', opcode=88, arg=3, argval='print', argrepr='print + NULL', offset=426, start_offset=426, starts_line=True, line_number=28, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]), - Instruction(opname='LOAD_CONST', opcode=80, arg=5, argval="OK, now we're done", argrepr='"OK, now we\'re done"', offset=436, start_offset=436, starts_line=False, line_number=28, label=None, positions=None, cache_info=None), - Instruction(opname='CALL', opcode=50, arg=1, argval=1, argrepr='', offset=438, start_offset=438, starts_line=False, line_number=28, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]), - Instruction(opname='POP_TOP', opcode=30, arg=None, argval=None, argrepr='', offset=446, start_offset=446, starts_line=False, line_number=28, label=None, positions=None, cache_info=None), - Instruction(opname='RERAISE', opcode=101, arg=0, argval=0, argrepr='', offset=448, start_offset=448, starts_line=False, line_number=28, label=None, positions=None, cache_info=None), - Instruction(opname='COPY', opcode=58, arg=3, argval=3, argrepr='', offset=450, start_offset=450, starts_line=True, line_number=None, label=None, positions=None, cache_info=None), - Instruction(opname='POP_EXCEPT', opcode=29, arg=None, argval=None, argrepr='', offset=452, start_offset=452, starts_line=False, line_number=None, label=None, positions=None, cache_info=None), - Instruction(opname='RERAISE', opcode=101, arg=1, argval=1, argrepr='', offset=454, start_offset=454, starts_line=False, line_number=None, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_GLOBAL', opcode=89, arg=1, argval='range', argrepr='range + NULL', offset=2, start_offset=2, starts_line=True, line_number=3, label=None, 
positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]), + Instruction(opname='LOAD_SMALL_INT', opcode=91, arg=10, argval=10, argrepr='', offset=12, start_offset=12, starts_line=False, line_number=3, label=None, positions=None, cache_info=None), + Instruction(opname='CALL', opcode=52, arg=1, argval=1, argrepr='', offset=14, start_offset=14, starts_line=False, line_number=3, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]), + Instruction(opname='GET_ITER', opcode=18, arg=None, argval=None, argrepr='', offset=22, start_offset=22, starts_line=False, line_number=3, label=None, positions=None, cache_info=None), + Instruction(opname='FOR_ITER', opcode=69, arg=32, argval=92, argrepr='to L4', offset=24, start_offset=24, starts_line=False, line_number=3, label=1, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), + Instruction(opname='STORE_FAST', opcode=109, arg=0, argval='i', argrepr='i', offset=28, start_offset=28, starts_line=False, line_number=3, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_GLOBAL', opcode=89, arg=3, argval='print', argrepr='print + NULL', offset=30, start_offset=30, starts_line=True, line_number=4, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]), + Instruction(opname='LOAD_FAST', opcode=83, arg=0, argval='i', argrepr='i', offset=40, start_offset=40, starts_line=False, line_number=4, label=None, positions=None, cache_info=None), + Instruction(opname='CALL', opcode=52, arg=1, argval=1, argrepr='', offset=42, start_offset=42, starts_line=False, line_number=4, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]), + Instruction(opname='POP_TOP', opcode=32, arg=None, argval=None, argrepr='', offset=50, start_offset=50, starts_line=False, line_number=4, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_FAST', opcode=83, arg=0, argval='i', argrepr='i', offset=52, start_offset=52, starts_line=True, line_number=5, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_SMALL_INT', opcode=91, arg=4, argval=4, argrepr='', offset=54, start_offset=54, starts_line=False, line_number=5, label=None, positions=None, cache_info=None), + Instruction(opname='COMPARE_OP', opcode=56, arg=18, argval='<', argrepr='bool(<)', offset=56, start_offset=56, starts_line=False, line_number=5, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), + Instruction(opname='POP_JUMP_IF_FALSE', opcode=97, arg=3, argval=70, argrepr='to L2', offset=60, start_offset=60, starts_line=False, line_number=5, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), + Instruction(opname='NOT_TAKEN', opcode=29, arg=None, argval=None, argrepr='', offset=64, start_offset=64, starts_line=False, line_number=5, label=None, positions=None, cache_info=None), + Instruction(opname='JUMP_BACKWARD', opcode=74, arg=23, argval=24, argrepr='to L1', offset=66, start_offset=66, starts_line=True, line_number=6, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), + Instruction(opname='LOAD_FAST', opcode=83, arg=0, argval='i', argrepr='i', offset=70, start_offset=70, starts_line=True, line_number=7, label=2, positions=None, cache_info=None), + 
Instruction(opname='LOAD_SMALL_INT', opcode=91, arg=6, argval=6, argrepr='', offset=72, start_offset=72, starts_line=False, line_number=7, label=None, positions=None, cache_info=None), + Instruction(opname='COMPARE_OP', opcode=56, arg=148, argval='>', argrepr='bool(>)', offset=74, start_offset=74, starts_line=False, line_number=7, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), + Instruction(opname='POP_JUMP_IF_TRUE', opcode=100, arg=3, argval=88, argrepr='to L3', offset=78, start_offset=78, starts_line=False, line_number=7, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), + Instruction(opname='NOT_TAKEN', opcode=29, arg=None, argval=None, argrepr='', offset=82, start_offset=82, starts_line=False, line_number=7, label=None, positions=None, cache_info=None), + Instruction(opname='JUMP_BACKWARD', opcode=74, arg=32, argval=24, argrepr='to L1', offset=84, start_offset=84, starts_line=False, line_number=7, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), + Instruction(opname='POP_TOP', opcode=32, arg=None, argval=None, argrepr='', offset=88, start_offset=88, starts_line=True, line_number=8, label=3, positions=None, cache_info=None), + Instruction(opname='JUMP_FORWARD', opcode=76, arg=13, argval=118, argrepr='to L5', offset=90, start_offset=90, starts_line=False, line_number=8, label=None, positions=None, cache_info=None), + Instruction(opname='END_FOR', opcode=10, arg=None, argval=None, argrepr='', offset=92, start_offset=92, starts_line=True, line_number=3, label=4, positions=None, cache_info=None), + Instruction(opname='POP_ITER', opcode=31, arg=None, argval=None, argrepr='', offset=94, start_offset=94, starts_line=False, line_number=3, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_GLOBAL', opcode=89, arg=3, argval='print', argrepr='print + NULL', offset=96, start_offset=96, starts_line=True, line_number=10, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]), + Instruction(opname='LOAD_CONST', opcode=81, arg=0, argval='I can haz else clause?', argrepr="'I can haz else clause?'", offset=106, start_offset=106, starts_line=False, line_number=10, label=None, positions=None, cache_info=None), + Instruction(opname='CALL', opcode=52, arg=1, argval=1, argrepr='', offset=108, start_offset=108, starts_line=False, line_number=10, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]), + Instruction(opname='POP_TOP', opcode=32, arg=None, argval=None, argrepr='', offset=116, start_offset=116, starts_line=False, line_number=10, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_FAST_CHECK', opcode=85, arg=0, argval='i', argrepr='i', offset=118, start_offset=118, starts_line=True, line_number=11, label=5, positions=None, cache_info=None), + Instruction(opname='TO_BOOL', opcode=40, arg=None, argval=None, argrepr='', offset=120, start_offset=120, starts_line=False, line_number=11, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('version', 2, b'\x00\x00\x00\x00')]), + Instruction(opname='POP_JUMP_IF_FALSE', opcode=97, arg=40, argval=212, argrepr='to L8', offset=128, start_offset=128, starts_line=False, line_number=11, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), + Instruction(opname='NOT_TAKEN', opcode=29, arg=None, argval=None, argrepr='', offset=132, 
start_offset=132, starts_line=False, line_number=11, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_GLOBAL', opcode=89, arg=3, argval='print', argrepr='print + NULL', offset=134, start_offset=134, starts_line=True, line_number=12, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]), + Instruction(opname='LOAD_FAST', opcode=83, arg=0, argval='i', argrepr='i', offset=144, start_offset=144, starts_line=False, line_number=12, label=None, positions=None, cache_info=None), + Instruction(opname='CALL', opcode=52, arg=1, argval=1, argrepr='', offset=146, start_offset=146, starts_line=False, line_number=12, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]), + Instruction(opname='POP_TOP', opcode=32, arg=None, argval=None, argrepr='', offset=154, start_offset=154, starts_line=False, line_number=12, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_FAST', opcode=83, arg=0, argval='i', argrepr='i', offset=156, start_offset=156, starts_line=True, line_number=13, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_SMALL_INT', opcode=91, arg=1, argval=1, argrepr='', offset=158, start_offset=158, starts_line=False, line_number=13, label=None, positions=None, cache_info=None), + Instruction(opname='BINARY_OP', opcode=45, arg=23, argval=23, argrepr='-=', offset=160, start_offset=160, starts_line=False, line_number=13, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('descr', 4, b'\x00\x00\x00\x00\x00\x00\x00\x00')]), + Instruction(opname='STORE_FAST', opcode=109, arg=0, argval='i', argrepr='i', offset=172, start_offset=172, starts_line=False, line_number=13, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_FAST', opcode=83, arg=0, argval='i', argrepr='i', offset=174, start_offset=174, starts_line=True, line_number=14, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_SMALL_INT', opcode=91, arg=6, argval=6, argrepr='', offset=176, start_offset=176, starts_line=False, line_number=14, label=None, positions=None, cache_info=None), + Instruction(opname='COMPARE_OP', opcode=56, arg=148, argval='>', argrepr='bool(>)', offset=178, start_offset=178, starts_line=False, line_number=14, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), + Instruction(opname='POP_JUMP_IF_FALSE', opcode=97, arg=3, argval=192, argrepr='to L6', offset=182, start_offset=182, starts_line=False, line_number=14, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), + Instruction(opname='NOT_TAKEN', opcode=29, arg=None, argval=None, argrepr='', offset=186, start_offset=186, starts_line=False, line_number=14, label=None, positions=None, cache_info=None), + Instruction(opname='JUMP_BACKWARD', opcode=74, arg=37, argval=118, argrepr='to L5', offset=188, start_offset=188, starts_line=True, line_number=15, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), + Instruction(opname='LOAD_FAST', opcode=83, arg=0, argval='i', argrepr='i', offset=192, start_offset=192, starts_line=True, line_number=16, label=6, positions=None, cache_info=None), + Instruction(opname='LOAD_SMALL_INT', opcode=91, arg=4, argval=4, argrepr='', offset=194, start_offset=194, starts_line=False, line_number=16, label=None, positions=None, cache_info=None), + Instruction(opname='COMPARE_OP', opcode=56, 
arg=18, argval='<', argrepr='bool(<)', offset=196, start_offset=196, starts_line=False, line_number=16, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), + Instruction(opname='POP_JUMP_IF_TRUE', opcode=100, arg=3, argval=210, argrepr='to L7', offset=200, start_offset=200, starts_line=False, line_number=16, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), + Instruction(opname='NOT_TAKEN', opcode=29, arg=None, argval=None, argrepr='', offset=204, start_offset=204, starts_line=False, line_number=16, label=None, positions=None, cache_info=None), + Instruction(opname='JUMP_BACKWARD', opcode=74, arg=46, argval=118, argrepr='to L5', offset=206, start_offset=206, starts_line=False, line_number=16, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), + Instruction(opname='JUMP_FORWARD', opcode=76, arg=11, argval=234, argrepr='to L9', offset=210, start_offset=210, starts_line=True, line_number=17, label=7, positions=None, cache_info=None), + Instruction(opname='LOAD_GLOBAL', opcode=89, arg=3, argval='print', argrepr='print + NULL', offset=212, start_offset=212, starts_line=True, line_number=19, label=8, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]), + Instruction(opname='LOAD_CONST', opcode=81, arg=1, argval='Who let lolcatz into this test suite?', argrepr="'Who let lolcatz into this test suite?'", offset=222, start_offset=222, starts_line=False, line_number=19, label=None, positions=None, cache_info=None), + Instruction(opname='CALL', opcode=52, arg=1, argval=1, argrepr='', offset=224, start_offset=224, starts_line=False, line_number=19, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]), + Instruction(opname='POP_TOP', opcode=32, arg=None, argval=None, argrepr='', offset=232, start_offset=232, starts_line=False, line_number=19, label=None, positions=None, cache_info=None), + Instruction(opname='NOP', opcode=28, arg=None, argval=None, argrepr='', offset=234, start_offset=234, starts_line=True, line_number=20, label=9, positions=None, cache_info=None), + Instruction(opname='LOAD_SMALL_INT', opcode=91, arg=1, argval=1, argrepr='', offset=236, start_offset=236, starts_line=True, line_number=21, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_SMALL_INT', opcode=91, arg=0, argval=0, argrepr='', offset=238, start_offset=238, starts_line=False, line_number=21, label=None, positions=None, cache_info=None), + Instruction(opname='BINARY_OP', opcode=45, arg=11, argval=11, argrepr='/', offset=240, start_offset=240, starts_line=False, line_number=21, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('descr', 4, b'\x00\x00\x00\x00\x00\x00\x00\x00')]), + Instruction(opname='POP_TOP', opcode=32, arg=None, argval=None, argrepr='', offset=252, start_offset=252, starts_line=False, line_number=21, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_FAST', opcode=83, arg=0, argval='i', argrepr='i', offset=254, start_offset=254, starts_line=True, line_number=25, label=None, positions=None, cache_info=None), + Instruction(opname='COPY', opcode=59, arg=1, argval=1, argrepr='', offset=256, start_offset=256, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_SPECIAL', opcode=92, arg=1, argval=1, argrepr='__exit__', offset=258, start_offset=258, starts_line=False, 
line_number=25, label=None, positions=None, cache_info=None), + Instruction(opname='SWAP', opcode=114, arg=2, argval=2, argrepr='', offset=260, start_offset=260, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), + Instruction(opname='SWAP', opcode=114, arg=3, argval=3, argrepr='', offset=262, start_offset=262, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_SPECIAL', opcode=92, arg=0, argval=0, argrepr='__enter__', offset=264, start_offset=264, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), + Instruction(opname='CALL', opcode=52, arg=0, argval=0, argrepr='', offset=266, start_offset=266, starts_line=False, line_number=25, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]), + Instruction(opname='STORE_FAST', opcode=109, arg=1, argval='dodgy', argrepr='dodgy', offset=274, start_offset=274, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_GLOBAL', opcode=89, arg=3, argval='print', argrepr='print + NULL', offset=276, start_offset=276, starts_line=True, line_number=26, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]), + Instruction(opname='LOAD_CONST', opcode=81, arg=2, argval='Never reach this', argrepr="'Never reach this'", offset=286, start_offset=286, starts_line=False, line_number=26, label=None, positions=None, cache_info=None), + Instruction(opname='CALL', opcode=52, arg=1, argval=1, argrepr='', offset=288, start_offset=288, starts_line=False, line_number=26, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]), + Instruction(opname='POP_TOP', opcode=32, arg=None, argval=None, argrepr='', offset=296, start_offset=296, starts_line=False, line_number=26, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_CONST', opcode=81, arg=3, argval=None, argrepr='None', offset=298, start_offset=298, starts_line=True, line_number=25, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_CONST', opcode=81, arg=3, argval=None, argrepr='None', offset=300, start_offset=300, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_CONST', opcode=81, arg=3, argval=None, argrepr='None', offset=302, start_offset=302, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), + Instruction(opname='CALL', opcode=52, arg=3, argval=3, argrepr='', offset=304, start_offset=304, starts_line=False, line_number=25, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]), + Instruction(opname='POP_TOP', opcode=32, arg=None, argval=None, argrepr='', offset=312, start_offset=312, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_GLOBAL', opcode=89, arg=3, argval='print', argrepr='print + NULL', offset=314, start_offset=314, starts_line=True, line_number=28, label=10, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]), + Instruction(opname='LOAD_CONST', opcode=81, arg=5, argval="OK, now we're done", argrepr='"OK, now we\'re done"', offset=324, 
start_offset=324, starts_line=False, line_number=28, label=None, positions=None, cache_info=None), + Instruction(opname='CALL', opcode=52, arg=1, argval=1, argrepr='', offset=326, start_offset=326, starts_line=False, line_number=28, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]), + Instruction(opname='POP_TOP', opcode=32, arg=None, argval=None, argrepr='', offset=334, start_offset=334, starts_line=False, line_number=28, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_CONST', opcode=81, arg=3, argval=None, argrepr='None', offset=336, start_offset=336, starts_line=False, line_number=28, label=None, positions=None, cache_info=None), + Instruction(opname='RETURN_VALUE', opcode=36, arg=None, argval=None, argrepr='', offset=338, start_offset=338, starts_line=False, line_number=28, label=None, positions=None, cache_info=None), + Instruction(opname='PUSH_EXC_INFO', opcode=33, arg=None, argval=None, argrepr='', offset=340, start_offset=340, starts_line=True, line_number=25, label=None, positions=None, cache_info=None), + Instruction(opname='WITH_EXCEPT_START', opcode=44, arg=None, argval=None, argrepr='', offset=342, start_offset=342, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), + Instruction(opname='TO_BOOL', opcode=40, arg=None, argval=None, argrepr='', offset=344, start_offset=344, starts_line=False, line_number=25, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('version', 2, b'\x00\x00\x00\x00')]), + Instruction(opname='POP_JUMP_IF_TRUE', opcode=100, arg=2, argval=360, argrepr='to L11', offset=352, start_offset=352, starts_line=False, line_number=25, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), + Instruction(opname='NOT_TAKEN', opcode=29, arg=None, argval=None, argrepr='', offset=356, start_offset=356, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), + Instruction(opname='RERAISE', opcode=102, arg=2, argval=2, argrepr='', offset=358, start_offset=358, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), + Instruction(opname='POP_TOP', opcode=32, arg=None, argval=None, argrepr='', offset=360, start_offset=360, starts_line=False, line_number=25, label=11, positions=None, cache_info=None), + Instruction(opname='POP_EXCEPT', opcode=30, arg=None, argval=None, argrepr='', offset=362, start_offset=362, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), + Instruction(opname='POP_TOP', opcode=32, arg=None, argval=None, argrepr='', offset=364, start_offset=364, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), + Instruction(opname='POP_TOP', opcode=32, arg=None, argval=None, argrepr='', offset=366, start_offset=366, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), + Instruction(opname='POP_TOP', opcode=32, arg=None, argval=None, argrepr='', offset=368, start_offset=368, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), + Instruction(opname='JUMP_BACKWARD_NO_INTERRUPT', opcode=75, arg=29, argval=314, argrepr='to L10', offset=370, start_offset=370, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), + Instruction(opname='COPY', opcode=59, arg=3, argval=3, argrepr='', offset=372, start_offset=372, starts_line=True, line_number=None, label=None, positions=None, cache_info=None), + Instruction(opname='POP_EXCEPT', opcode=30, 
arg=None, argval=None, argrepr='', offset=374, start_offset=374, starts_line=False, line_number=None, label=None, positions=None, cache_info=None), + Instruction(opname='RERAISE', opcode=102, arg=1, argval=1, argrepr='', offset=376, start_offset=376, starts_line=False, line_number=None, label=None, positions=None, cache_info=None), + Instruction(opname='PUSH_EXC_INFO', opcode=33, arg=None, argval=None, argrepr='', offset=378, start_offset=378, starts_line=False, line_number=None, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_GLOBAL', opcode=89, arg=4, argval='ZeroDivisionError', argrepr='ZeroDivisionError', offset=380, start_offset=380, starts_line=True, line_number=22, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]), + Instruction(opname='CHECK_EXC_MATCH', opcode=6, arg=None, argval=None, argrepr='', offset=390, start_offset=390, starts_line=False, line_number=22, label=None, positions=None, cache_info=None), + Instruction(opname='POP_JUMP_IF_FALSE', opcode=97, arg=15, argval=426, argrepr='to L12', offset=392, start_offset=392, starts_line=False, line_number=22, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), + Instruction(opname='NOT_TAKEN', opcode=29, arg=None, argval=None, argrepr='', offset=396, start_offset=396, starts_line=False, line_number=22, label=None, positions=None, cache_info=None), + Instruction(opname='POP_TOP', opcode=32, arg=None, argval=None, argrepr='', offset=398, start_offset=398, starts_line=False, line_number=22, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_GLOBAL', opcode=89, arg=3, argval='print', argrepr='print + NULL', offset=400, start_offset=400, starts_line=True, line_number=23, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]), + Instruction(opname='LOAD_CONST', opcode=81, arg=4, argval='Here we go, here we go, here we go...', argrepr="'Here we go, here we go, here we go...'", offset=410, start_offset=410, starts_line=False, line_number=23, label=None, positions=None, cache_info=None), + Instruction(opname='CALL', opcode=52, arg=1, argval=1, argrepr='', offset=412, start_offset=412, starts_line=False, line_number=23, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]), + Instruction(opname='POP_TOP', opcode=32, arg=None, argval=None, argrepr='', offset=420, start_offset=420, starts_line=False, line_number=23, label=None, positions=None, cache_info=None), + Instruction(opname='POP_EXCEPT', opcode=30, arg=None, argval=None, argrepr='', offset=422, start_offset=422, starts_line=False, line_number=23, label=None, positions=None, cache_info=None), + Instruction(opname='JUMP_BACKWARD_NO_INTERRUPT', opcode=75, arg=56, argval=314, argrepr='to L10', offset=424, start_offset=424, starts_line=False, line_number=23, label=None, positions=None, cache_info=None), + Instruction(opname='RERAISE', opcode=102, arg=0, argval=0, argrepr='', offset=426, start_offset=426, starts_line=True, line_number=22, label=12, positions=None, cache_info=None), + Instruction(opname='COPY', opcode=59, arg=3, argval=3, argrepr='', offset=428, start_offset=428, starts_line=True, line_number=None, label=None, positions=None, cache_info=None), + Instruction(opname='POP_EXCEPT', opcode=30, arg=None, 
argval=None, argrepr='', offset=430, start_offset=430, starts_line=False, line_number=None, label=None, positions=None, cache_info=None), + Instruction(opname='RERAISE', opcode=102, arg=1, argval=1, argrepr='', offset=432, start_offset=432, starts_line=False, line_number=None, label=None, positions=None, cache_info=None), + Instruction(opname='PUSH_EXC_INFO', opcode=33, arg=None, argval=None, argrepr='', offset=434, start_offset=434, starts_line=False, line_number=None, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_GLOBAL', opcode=89, arg=3, argval='print', argrepr='print + NULL', offset=436, start_offset=436, starts_line=True, line_number=28, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]), + Instruction(opname='LOAD_CONST', opcode=81, arg=5, argval="OK, now we're done", argrepr='"OK, now we\'re done"', offset=446, start_offset=446, starts_line=False, line_number=28, label=None, positions=None, cache_info=None), + Instruction(opname='CALL', opcode=52, arg=1, argval=1, argrepr='', offset=448, start_offset=448, starts_line=False, line_number=28, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]), + Instruction(opname='POP_TOP', opcode=32, arg=None, argval=None, argrepr='', offset=456, start_offset=456, starts_line=False, line_number=28, label=None, positions=None, cache_info=None), + Instruction(opname='RERAISE', opcode=102, arg=0, argval=0, argrepr='', offset=458, start_offset=458, starts_line=False, line_number=28, label=None, positions=None, cache_info=None), + Instruction(opname='COPY', opcode=59, arg=3, argval=3, argrepr='', offset=460, start_offset=460, starts_line=True, line_number=None, label=None, positions=None, cache_info=None), + Instruction(opname='POP_EXCEPT', opcode=30, arg=None, argval=None, argrepr='', offset=462, start_offset=462, starts_line=False, line_number=None, label=None, positions=None, cache_info=None), + Instruction(opname='RERAISE', opcode=102, arg=1, argval=1, argrepr='', offset=464, start_offset=464, starts_line=False, line_number=None, label=None, positions=None, cache_info=None), ] # One last piece of inspect fodder to check the default line number handling def simple(): pass expected_opinfo_simple = [ Instruction(opname='RESUME', opcode=149, arg=0, argval=0, argrepr='', offset=0, start_offset=0, starts_line=True, line_number=simple.__code__.co_firstlineno, label=None, positions=None), - Instruction(opname='LOAD_CONST', opcode=80, arg=0, argval=None, argrepr='None', offset=2, start_offset=2, starts_line=False, line_number=simple.__code__.co_firstlineno, label=None), - Instruction(opname='RETURN_VALUE', opcode=34, arg=None, argval=None, argrepr='', offset=4, start_offset=4, starts_line=False, line_number=simple.__code__.co_firstlineno, label=None), + Instruction(opname='LOAD_CONST', opcode=81, arg=0, argval=None, argrepr='None', offset=2, start_offset=2, starts_line=False, line_number=simple.__code__.co_firstlineno, label=None), + Instruction(opname='RETURN_VALUE', opcode=36, arg=None, argval=None, argrepr='', offset=4, start_offset=4, starts_line=False, line_number=simple.__code__.co_firstlineno, label=None), ] @@ -2554,7 +2551,7 @@ def test_specialized_code(self): expect = ''' 0 RESUME 0 - 1 LOAD_CONST_IMMORTAL 0 (None) + 1 LOAD_CONST 0 (None) RETURN_VALUE ''' for flag in ['-S', '--specialized']: diff --git 
a/Lib/test/test_doctest/test_doctest.py b/Lib/test/test_doctest/test_doctest.py index b1e165fe16b54f..a4a49298bab3be 100644 --- a/Lib/test/test_doctest/test_doctest.py +++ b/Lib/test/test_doctest/test_doctest.py @@ -2860,7 +2860,7 @@ def test_testfile(): r""" >>> _colorize.COLORIZE = save_colorize """ -class TestImporter(importlib.abc.MetaPathFinder, importlib.abc.ResourceLoader): +class TestImporter(importlib.abc.MetaPathFinder): def find_spec(self, fullname, path, target=None): return importlib.util.spec_from_file_location(fullname, path, loader=self) @@ -2869,6 +2869,12 @@ def get_data(self, path): with open(path, mode='rb') as f: return f.read() + def exec_module(self, module): + raise ImportError + + def create_module(self, spec): + return None + class TestHook: def __init__(self, pathdir): diff --git a/Lib/test/test_email/test__header_value_parser.py b/Lib/test/test_email/test__header_value_parser.py index 95224e19f67ce5..d60a7039f9d4c6 100644 --- a/Lib/test/test_email/test__header_value_parser.py +++ b/Lib/test/test_email/test__header_value_parser.py @@ -3082,13 +3082,40 @@ def test_address_list_with_list_separator_after_fold(self): self._test(parser.get_address_list(to)[0], f'{a},\n =?utf-8?q?H=C3=BCbsch?= Kaktus \n') - a = '.' * 79 + a = '.' * 79 # ('.' is a special, so must be in quoted-string.) to = f'"{a}" , "Hübsch Kaktus" ' self._test(parser.get_address_list(to)[0], - f'{a}\n' + f'"{a}"\n' ' , =?utf-8?q?H=C3=BCbsch?= Kaktus ' '\n') + def test_address_list_with_specials_in_long_quoted_string(self): + # Regression for gh-80222. + policy = self.policy.clone(max_line_length=40) + cases = [ + # (to, folded) + ('"Exfiltrator (unclosed comment?" ', + '"Exfiltrator (unclosed\n' + ' comment?" \n'), + ('"Escaped \\" chars \\\\ in quoted-string stay escaped" ', + '"Escaped \\" chars \\\\ in quoted-string\n' + ' stay escaped" \n'), + ('This long display name does not need quotes ', + 'This long display name does not need\n' + ' quotes \n'), + ('"Quotes are not required but are retained here" ', + '"Quotes are not required but are\n' + ' retained here" \n'), + ('"A quoted-string, it can be a valid local-part"@example.com', + '"A quoted-string, it can be a valid\n' + ' local-part"@example.com\n'), + ('"local-part-with-specials@but-no-fws.cannot-fold"@example.com', + '"local-part-with-specials@but-no-fws.cannot-fold"@example.com\n'), + ] + for (to, folded) in cases: + with self.subTest(to=to): + self._test(parser.get_address_list(to)[0], folded, policy=policy) + # XXX Need tests with comments on various sides of a unicode token, # and with unicode tokens in the comments. Spaces inside the quotes # currently don't do the right thing. diff --git a/Lib/test/test_embed.py b/Lib/test/test_embed.py index 7110fb889f3c8e..cd65496cafb04d 100644 --- a/Lib/test/test_embed.py +++ b/Lib/test/test_embed.py @@ -51,6 +51,14 @@ MAX_HASH_SEED = 4294967295 ABI_THREAD = 't' if sysconfig.get_config_var('Py_GIL_DISABLED') else '' +# PLATSTDLIB_LANDMARK copied from Modules/getpath.py +if os.name == 'nt': + PLATSTDLIB_LANDMARK = f'{sys.platlibdir}' +else: + VERSION_MAJOR = sys.version_info.major + VERSION_MINOR = sys.version_info.minor + PLATSTDLIB_LANDMARK = (f'{sys.platlibdir}/python{VERSION_MAJOR}.' 
+ f'{VERSION_MINOR}{ABI_THREAD}/lib-dynload') # If we are running from a build dir, but the stdlib has been installed, @@ -376,11 +384,14 @@ def test_simple_initialization_api(self): def test_specialized_static_code_gets_unspecialized_at_Py_FINALIZE(self): # https://github.com/python/cpython/issues/92031 - code = textwrap.dedent("""\ + _testinternalcapi = import_helper.import_module("_testinternalcapi") + + code = textwrap.dedent(f"""\ import dis import importlib._bootstrap import opcode import test.test_dis + import test.support def is_specialized(f): for instruction in dis.get_instructions(f, adaptive=True): @@ -399,11 +410,11 @@ def is_specialized(f): func = importlib._bootstrap._handle_fromlist # "copy" the code to un-specialize it: - func.__code__ = func.__code__.replace() + test.support.reset_code(func) assert not is_specialized(func), "specialized instructions found" - for i in range(test.test_dis.ADAPTIVE_WARMUP_DELAY): + for _ in range({_testinternalcapi.SPECIALIZATION_THRESHOLD}): func(importlib._bootstrap, ["x"], lambda *args: None) assert is_specialized(func), "no specialized instructions found" @@ -940,6 +951,7 @@ def check_all_configs(self, testname, expected_config=None, self.check_global_config(configs) return configs + @unittest.skipIf(support.check_bolt_optimized, "segfaults on BOLT instrumented binaries") def test_init_default_config(self): self.check_all_configs("test_init_initialize_config", api=API_COMPAT) @@ -1039,6 +1051,7 @@ def test_init_from_config(self): self.check_all_configs("test_init_from_config", config, preconfig, api=API_COMPAT) + @unittest.skipIf(support.check_bolt_optimized, "segfaults on BOLT instrumented binaries") def test_init_compat_env(self): preconfig = { 'allocator': ALLOCATOR_FOR_CONFIG, @@ -1047,7 +1060,6 @@ def test_init_compat_env(self): 'use_hash_seed': True, 'hash_seed': 42, 'tracemalloc': 2, - 'perf_profiling': 0, 'import_time': True, 'code_debug_ranges': False, 'malloc_stats': True, @@ -1074,6 +1086,7 @@ def test_init_compat_env(self): self.check_all_configs("test_init_compat_env", config, preconfig, api=API_COMPAT) + @unittest.skipIf(support.check_bolt_optimized, "segfaults on BOLT instrumented binaries") def test_init_python_env(self): preconfig = { 'allocator': ALLOCATOR_FOR_CONFIG, @@ -1083,7 +1096,6 @@ def test_init_python_env(self): 'use_hash_seed': True, 'hash_seed': 42, 'tracemalloc': 2, - 'perf_profiling': 0, 'import_time': True, 'code_debug_ranges': False, 'malloc_stats': True, @@ -1271,24 +1283,6 @@ def test_init_run_main(self): } self.check_all_configs("test_init_run_main", config, api=API_PYTHON) - def test_init_main(self): - code = ('import _testinternalcapi, json; ' - 'print(json.dumps(_testinternalcapi.get_configs()))') - config = { - 'argv': ['-c', 'arg2'], - 'orig_argv': ['python3', - '-c', code, - 'arg2'], - 'program_name': './python3', - 'run_command': code + '\n', - 'parse_argv': True, - '_init_main': False, - 'sys_path_0': '', - } - self.check_all_configs("test_init_main", config, - api=API_PYTHON, - stderr="Run Python code before _Py_InitializeMain") - def test_init_parse_argv(self): config = { 'parse_argv': True, @@ -1613,7 +1607,13 @@ def test_init_pyvenv_cfg(self): with self.tmpdir_with_python() as tmpdir, \ tempfile.TemporaryDirectory() as pyvenv_home: + ver = sys.version_info + base_prefix = sysconfig.get_config_var("prefix") + + # gh-128690: base_exec_prefix depends if PLATSTDLIB_LANDMARK exists + platstdlib = os.path.join(base_prefix, PLATSTDLIB_LANDMARK) + change_exec_prefix = not os.path.isdir(platstdlib) 
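# A minimal standalone sketch of the landmark check relied on just above,
# assuming a POSIX layout (on Windows the landmark is just sys.platlibdir);
# the names below mirror the test code and are illustrative only.
import os
import sys
import sysconfig

ABI_THREAD = 't' if sysconfig.get_config_var('Py_GIL_DISABLED') else ''
PLATSTDLIB_LANDMARK = (f'{sys.platlibdir}/python{sys.version_info.major}.'
                       f'{sys.version_info.minor}{ABI_THREAD}/lib-dynload')

base_prefix = sysconfig.get_config_var('prefix')
platstdlib = os.path.join(base_prefix, PLATSTDLIB_LANDMARK)
# base_exec_prefix only needs to point at the venv home when the landmark
# directory (lib-dynload) is missing from the base installation.
change_exec_prefix = not os.path.isdir(platstdlib)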
if not MS_WINDOWS: lib_dynload = os.path.join(pyvenv_home, @@ -1637,7 +1637,8 @@ def test_init_pyvenv_cfg(self): paths = self.module_search_paths() if not MS_WINDOWS: - paths[-1] = lib_dynload + if change_exec_prefix: + paths[-1] = lib_dynload else: paths = [ os.path.join(tmpdir, os.path.basename(paths[0])), @@ -1647,16 +1648,16 @@ def test_init_pyvenv_cfg(self): executable = self.test_exe base_executable = os.path.join(pyvenv_home, os.path.basename(executable)) - exec_prefix = pyvenv_home config = { - 'base_prefix': sysconfig.get_config_var("prefix"), - 'base_exec_prefix': exec_prefix, + 'base_prefix': base_prefix, 'exec_prefix': tmpdir, 'prefix': tmpdir, 'base_executable': base_executable, 'executable': executable, 'module_search_paths': paths, } + if change_exec_prefix: + config['base_exec_prefix'] = pyvenv_home if MS_WINDOWS: config['base_prefix'] = pyvenv_home config['stdlib_dir'] = os.path.join(pyvenv_home, 'Lib') @@ -1763,15 +1764,7 @@ def test_init_warnoptions(self): self.check_all_configs("test_init_warnoptions", config, preconfig, api=API_PYTHON) - def test_init_set_config(self): - config = { - '_init_main': 0, - 'bytes_warning': 2, - 'warnoptions': ['error::BytesWarning'], - } - self.check_all_configs("test_init_set_config", config, - api=API_ISOLATED) - + @unittest.skipIf(support.check_bolt_optimized, "segfaults on BOLT instrumented binaries") def test_initconfig_api(self): preconfig = { 'configure_locale': True, @@ -1862,22 +1855,6 @@ def test_init_in_background_thread(self): self.assertEqual(err, "") -class SetConfigTests(unittest.TestCase): - def test_set_config(self): - # bpo-42260: Test _PyInterpreterState_SetConfig() - import_helper.import_module('_testcapi') - cmd = [sys.executable, '-X', 'utf8', '-I', '-m', 'test._test_embed_set_config'] - proc = subprocess.run(cmd, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - encoding='utf-8', errors='backslashreplace') - if proc.returncode and support.verbose: - print(proc.stdout) - print(proc.stderr) - self.assertEqual(proc.returncode, 0, - (proc.returncode, proc.stdout, proc.stderr)) - - class AuditingTests(EmbeddingTestsMixin, unittest.TestCase): def test_open_code_hook(self): self.run_embedded_interpreter("test_open_code_hook") @@ -2009,56 +1986,5 @@ def test_presite(self): self.assertIn("unique-python-message", out) -class StdPrinterTests(EmbeddingTestsMixin, unittest.TestCase): - # Test PyStdPrinter_Type which is used by _PySys_SetPreliminaryStderr(): - # "Set up a preliminary stderr printer until we have enough - # infrastructure for the io module in place." - - STDOUT_FD = 1 - - def create_printer(self, fd): - ctypes = import_helper.import_module('ctypes') - PyFile_NewStdPrinter = ctypes.pythonapi.PyFile_NewStdPrinter - PyFile_NewStdPrinter.argtypes = (ctypes.c_int,) - PyFile_NewStdPrinter.restype = ctypes.py_object - return PyFile_NewStdPrinter(fd) - - def test_write(self): - message = "unicode:\xe9-\u20ac-\udc80!\n" - - stdout_fd = self.STDOUT_FD - stdout_fd_copy = os.dup(stdout_fd) - self.addCleanup(os.close, stdout_fd_copy) - - rfd, wfd = os.pipe() - self.addCleanup(os.close, rfd) - self.addCleanup(os.close, wfd) - try: - # PyFile_NewStdPrinter() only accepts fileno(stdout) - # or fileno(stderr) file descriptor. 
- os.dup2(wfd, stdout_fd) - - printer = self.create_printer(stdout_fd) - printer.write(message) - finally: - os.dup2(stdout_fd_copy, stdout_fd) - - data = os.read(rfd, 100) - self.assertEqual(data, message.encode('utf8', 'backslashreplace')) - - def test_methods(self): - fd = self.STDOUT_FD - printer = self.create_printer(fd) - self.assertEqual(printer.fileno(), fd) - self.assertEqual(printer.isatty(), os.isatty(fd)) - printer.flush() # noop - printer.close() # noop - - def test_disallow_instantiation(self): - fd = self.STDOUT_FD - printer = self.create_printer(fd) - support.check_disallow_instantiation(self, type(printer)) - - if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_eof.py b/Lib/test/test_eof.py index e377383450e19d..582e5b6de6e687 100644 --- a/Lib/test/test_eof.py +++ b/Lib/test/test_eof.py @@ -2,7 +2,7 @@ import sys from codecs import BOM_UTF8 -from test import support +from test.support import force_not_colorized from test.support import os_helper from test.support import script_helper from test.support import warnings_helper @@ -44,6 +44,7 @@ def test_EOFS(self): self.assertEqual(cm.exception.text, "ä = '''thîs is ") self.assertEqual(cm.exception.offset, 5) + @force_not_colorized def test_EOFS_with_file(self): expect = ("(, line 1)") with os_helper.temp_dir() as temp_dir: @@ -123,6 +124,7 @@ def test_line_continuation_EOF(self): self.assertEqual(str(cm.exception), expect) @unittest.skipIf(not sys.executable, "sys.executable required") + @force_not_colorized def test_line_continuation_EOF_from_file_bpo2180(self): """Ensure tok_nextc() does not add too many ending newlines.""" with os_helper.temp_dir() as temp_dir: diff --git a/Lib/test/test_exceptions.py b/Lib/test/test_exceptions.py index 6ccfa9575f8569..2d324827451b54 100644 --- a/Lib/test/test_exceptions.py +++ b/Lib/test/test_exceptions.py @@ -1465,6 +1465,7 @@ def gen(): @cpython_only @unittest.skipIf(_testcapi is None, "requires _testcapi") + @force_not_colorized def test_recursion_normalizing_infinite_exception(self): # Issue #30697. 
Test that a RecursionError is raised when # maximum recursion depth has been exceeded when creating @@ -2180,6 +2181,7 @@ def test_multiline_not_highlighted(self): self.assertEqual(result[-len(expected):], expected) +@support.force_not_colorized_test_class class SyntaxErrorTests(unittest.TestCase): maxDiff = None @@ -2274,6 +2276,7 @@ def test_range_of_offsets(self): self.assertIn(expected, err.getvalue()) the_exception = exc + @force_not_colorized def test_subclass(self): class MySyntaxError(SyntaxError): pass diff --git a/Lib/test/test_external_inspection.py b/Lib/test/test_external_inspection.py index d896fec73d1971..2ab48a4778be4d 100644 --- a/Lib/test/test_external_inspection.py +++ b/Lib/test/test_external_inspection.py @@ -13,8 +13,10 @@ try: from _testexternalinspection import PROCESS_VM_READV_SUPPORTED from _testexternalinspection import get_stack_trace + from _testexternalinspection import get_async_stack_trace except ImportError: - raise unittest.SkipTest("Test only runs when _testexternalinspection is available") + raise unittest.SkipTest( + "Test only runs when _testexternalinspection is available") def _make_test_script(script_dir, script_basename, source): to_return = make_script(script_dir, script_basename, source) @@ -23,12 +25,14 @@ def _make_test_script(script_dir, script_basename, source): class TestGetStackTrace(unittest.TestCase): - @unittest.skipIf(sys.platform != "darwin" and sys.platform != "linux", "Test only runs on Linux and MacOS") - @unittest.skipIf(sys.platform == "linux" and not PROCESS_VM_READV_SUPPORTED, "Test only runs on Linux with process_vm_readv support") + @unittest.skipIf(sys.platform != "darwin" and sys.platform != "linux", + "Test only runs on Linux and MacOS") + @unittest.skipIf(sys.platform == "linux" and not PROCESS_VM_READV_SUPPORTED, + "Test only runs on Linux with process_vm_readv support") def test_remote_stack_trace(self): # Spawn a process with some realistic Python code script = textwrap.dedent("""\ - import time, sys, os + import time, sys def bar(): for x in range(100): if x == 50: @@ -37,8 +41,8 @@ def baz(): foo() def foo(): - fifo = sys.argv[1] - with open(sys.argv[1], "w") as fifo: + fifo_path = sys.argv[1] + with open(fifo_path, "w") as fifo: fifo.write("ready") time.sleep(1000) @@ -74,8 +78,281 @@ def foo(): ] self.assertEqual(stack_trace, expected_stack_trace) - @unittest.skipIf(sys.platform != "darwin" and sys.platform != "linux", "Test only runs on Linux and MacOS") - @unittest.skipIf(sys.platform == "linux" and not PROCESS_VM_READV_SUPPORTED, "Test only runs on Linux with process_vm_readv support") + @unittest.skipIf(sys.platform != "darwin" and sys.platform != "linux", + "Test only runs on Linux and MacOS") + @unittest.skipIf(sys.platform == "linux" and not PROCESS_VM_READV_SUPPORTED, + "Test only runs on Linux with process_vm_readv support") + def test_async_remote_stack_trace(self): + # Spawn a process with some realistic Python code + script = textwrap.dedent("""\ + import asyncio + import time + import sys + + def c5(): + fifo_path = sys.argv[1] + with open(fifo_path, "w") as fifo: + fifo.write("ready") + time.sleep(10000) + + async def c4(): + await asyncio.sleep(0) + c5() + + async def c3(): + await c4() + + async def c2(): + await c3() + + async def c1(task): + await task + + async def main(): + async with asyncio.TaskGroup() as tg: + task = tg.create_task(c2(), name="c2_root") + tg.create_task(c1(task), name="sub_main_1") + tg.create_task(c1(task), name="sub_main_2") + + def new_eager_loop(): + loop = 
asyncio.new_event_loop() + eager_task_factory = asyncio.create_eager_task_factory( + asyncio.Task) + loop.set_task_factory(eager_task_factory) + return loop + + asyncio.run(main(), loop_factory={TASK_FACTORY}) + """) + stack_trace = None + for task_factory_variant in "asyncio.new_event_loop", "new_eager_loop": + with ( + self.subTest(task_factory_variant=task_factory_variant), + os_helper.temp_dir() as work_dir, + ): + script_dir = os.path.join(work_dir, "script_pkg") + os.mkdir(script_dir) + fifo = f"{work_dir}/the_fifo" + os.mkfifo(fifo) + script_name = _make_test_script( + script_dir, 'script', + script.format(TASK_FACTORY=task_factory_variant)) + try: + p = subprocess.Popen( + [sys.executable, script_name, str(fifo)] + ) + with open(fifo, "r") as fifo_file: + response = fifo_file.read() + self.assertEqual(response, "ready") + stack_trace = get_async_stack_trace(p.pid) + except PermissionError: + self.skipTest( + "Insufficient permissions to read the stack trace") + finally: + os.remove(fifo) + p.kill() + p.terminate() + p.wait(timeout=SHORT_TIMEOUT) + + # sets are unordered, so we want to sort "awaited_by"s + stack_trace[2].sort(key=lambda x: x[1]) + + root_task = "Task-1" + expected_stack_trace = [ + ["c5", "c4", "c3", "c2"], + "c2_root", + [ + [["main"], root_task, []], + [["c1"], "sub_main_1", [[["main"], root_task, []]]], + [["c1"], "sub_main_2", [[["main"], root_task, []]]], + ], + ] + self.assertEqual(stack_trace, expected_stack_trace) + + @unittest.skipIf(sys.platform != "darwin" and sys.platform != "linux", + "Test only runs on Linux and MacOS") + @unittest.skipIf(sys.platform == "linux" and not PROCESS_VM_READV_SUPPORTED, + "Test only runs on Linux with process_vm_readv support") + def test_asyncgen_remote_stack_trace(self): + # Spawn a process with some realistic Python code + script = textwrap.dedent("""\ + import asyncio + import time + import sys + + async def gen_nested_call(): + fifo_path = sys.argv[1] + with open(fifo_path, "w") as fifo: + fifo.write("ready") + time.sleep(10000) + + async def gen(): + for num in range(2): + yield num + if num == 1: + await gen_nested_call() + + async def main(): + async for el in gen(): + pass + + asyncio.run(main()) + """) + stack_trace = None + with os_helper.temp_dir() as work_dir: + script_dir = os.path.join(work_dir, "script_pkg") + os.mkdir(script_dir) + fifo = f"{work_dir}/the_fifo" + os.mkfifo(fifo) + script_name = _make_test_script(script_dir, 'script', script) + try: + p = subprocess.Popen([sys.executable, script_name, str(fifo)]) + with open(fifo, "r") as fifo_file: + response = fifo_file.read() + self.assertEqual(response, "ready") + stack_trace = get_async_stack_trace(p.pid) + except PermissionError: + self.skipTest("Insufficient permissions to read the stack trace") + finally: + os.remove(fifo) + p.kill() + p.terminate() + p.wait(timeout=SHORT_TIMEOUT) + + # sets are unordered, so we want to sort "awaited_by"s + stack_trace[2].sort(key=lambda x: x[1]) + + expected_stack_trace = [ + ['gen_nested_call', 'gen', 'main'], 'Task-1', [] + ] + self.assertEqual(stack_trace, expected_stack_trace) + + @unittest.skipIf(sys.platform != "darwin" and sys.platform != "linux", + "Test only runs on Linux and MacOS") + @unittest.skipIf(sys.platform == "linux" and not PROCESS_VM_READV_SUPPORTED, + "Test only runs on Linux with process_vm_readv support") + def test_async_gather_remote_stack_trace(self): + # Spawn a process with some realistic Python code + script = textwrap.dedent("""\ + import asyncio + import time + import sys + + async def 
deep(): + await asyncio.sleep(0) + fifo_path = sys.argv[1] + with open(fifo_path, "w") as fifo: + fifo.write("ready") + time.sleep(10000) + + async def c1(): + await asyncio.sleep(0) + await deep() + + async def c2(): + await asyncio.sleep(0) + + async def main(): + await asyncio.gather(c1(), c2()) + + asyncio.run(main()) + """) + stack_trace = None + with os_helper.temp_dir() as work_dir: + script_dir = os.path.join(work_dir, "script_pkg") + os.mkdir(script_dir) + fifo = f"{work_dir}/the_fifo" + os.mkfifo(fifo) + script_name = _make_test_script(script_dir, 'script', script) + try: + p = subprocess.Popen([sys.executable, script_name, str(fifo)]) + with open(fifo, "r") as fifo_file: + response = fifo_file.read() + self.assertEqual(response, "ready") + stack_trace = get_async_stack_trace(p.pid) + except PermissionError: + self.skipTest( + "Insufficient permissions to read the stack trace") + finally: + os.remove(fifo) + p.kill() + p.terminate() + p.wait(timeout=SHORT_TIMEOUT) + + # sets are unordered, so we want to sort "awaited_by"s + stack_trace[2].sort(key=lambda x: x[1]) + + expected_stack_trace = [ + ['deep', 'c1'], 'Task-2', [[['main'], 'Task-1', []]] + ] + self.assertEqual(stack_trace, expected_stack_trace) + + @unittest.skipIf(sys.platform != "darwin" and sys.platform != "linux", + "Test only runs on Linux and MacOS") + @unittest.skipIf(sys.platform == "linux" and not PROCESS_VM_READV_SUPPORTED, + "Test only runs on Linux with process_vm_readv support") + def test_async_staggered_race_remote_stack_trace(self): + # Spawn a process with some realistic Python code + script = textwrap.dedent("""\ + import asyncio.staggered + import time + import sys + + async def deep(): + await asyncio.sleep(0) + fifo_path = sys.argv[1] + with open(fifo_path, "w") as fifo: + fifo.write("ready") + time.sleep(10000) + + async def c1(): + await asyncio.sleep(0) + await deep() + + async def c2(): + await asyncio.sleep(10000) + + async def main(): + await asyncio.staggered.staggered_race( + [c1, c2], + delay=None, + ) + + asyncio.run(main()) + """) + stack_trace = None + with os_helper.temp_dir() as work_dir: + script_dir = os.path.join(work_dir, "script_pkg") + os.mkdir(script_dir) + fifo = f"{work_dir}/the_fifo" + os.mkfifo(fifo) + script_name = _make_test_script(script_dir, 'script', script) + try: + p = subprocess.Popen([sys.executable, script_name, str(fifo)]) + with open(fifo, "r") as fifo_file: + response = fifo_file.read() + self.assertEqual(response, "ready") + stack_trace = get_async_stack_trace(p.pid) + except PermissionError: + self.skipTest( + "Insufficient permissions to read the stack trace") + finally: + os.remove(fifo) + p.kill() + p.terminate() + p.wait(timeout=SHORT_TIMEOUT) + + # sets are unordered, so we want to sort "awaited_by"s + stack_trace[2].sort(key=lambda x: x[1]) + + expected_stack_trace = [ + ['deep', 'c1', 'run_one_coro'], 'Task-2', [[['main'], 'Task-1', []]] + ] + self.assertEqual(stack_trace, expected_stack_trace) + + @unittest.skipIf(sys.platform != "darwin" and sys.platform != "linux", + "Test only runs on Linux and MacOS") + @unittest.skipIf(sys.platform == "linux" and not PROCESS_VM_READV_SUPPORTED, + "Test only runs on Linux with process_vm_readv support") def test_self_trace(self): stack_trace = get_stack_trace(os.getpid()) self.assertEqual(stack_trace[0], "test_self_trace") diff --git a/Lib/test/test_faulthandler.py b/Lib/test/test_faulthandler.py index 2088793cbb9387..75d303cd212c82 100644 --- a/Lib/test/test_faulthandler.py +++ b/Lib/test/test_faulthandler.py @@ 
-101,8 +101,7 @@ def check_error(self, code, lineno, fatal_error, *, Raise an error if the output doesn't match the expected format. """ all_threads_disabled = ( - (not py_fatal_error) - and all_threads + all_threads and (not sys._is_gil_enabled()) ) if all_threads and not all_threads_disabled: @@ -116,12 +115,15 @@ if py_fatal_error: regex.append("Python runtime state: initialized") regex.append('') - if all_threads_disabled: + if all_threads_disabled and not py_fatal_error: regex.append("<Cannot show all threads while the GIL is disabled>") regex.append(fr'{header} \(most recent call first\):') - if garbage_collecting and not all_threads_disabled: - regex.append(' Garbage-collecting') - regex.append(fr' File "<string>", line {lineno} in {function}') + if support.Py_GIL_DISABLED and py_fatal_error and not know_current_thread: + regex.append("  <no Python frame>") + else: + if garbage_collecting and not all_threads_disabled: + regex.append(' Garbage-collecting') + regex.append(fr' File "<string>", line {lineno} in {function}') regex = '\n'.join(regex) if other_regex: diff --git a/Lib/test/test_frame.py b/Lib/test/test_frame.py index 11f191700ccef0..4d086064023488 100644 --- a/Lib/test/test_frame.py +++ b/Lib/test/test_frame.py @@ -222,6 +222,56 @@ def test_f_lineno_del_segfault(self): with self.assertRaises(AttributeError): del f.f_lineno + def test_f_generator(self): + # Test f_generator in different contexts. + + def t0(): + def nested(): + frame = sys._getframe() + return frame.f_generator + + def gen(): + yield nested() + + g = gen() + try: + return next(g) + finally: + g.close() + + def t1(): + frame = sys._getframe() + return frame.f_generator + + def t2(): + frame = sys._getframe() + yield frame.f_generator + + async def t3(): + frame = sys._getframe() + return frame.f_generator + + # For regular functions f_generator is None + self.assertIsNone(t0()) + self.assertIsNone(t1()) + + # For generators f_generator is equal to self + g = t2() + try: + frame_g = next(g) + self.assertIs(g, frame_g) + finally: + g.close() + + # Ditto for coroutines + c = t3() + try: + c.send(None) + except StopIteration as ex: + self.assertIs(ex.value, c) + else: + raise AssertionError('coroutine did not exit') + class ReprTest(unittest.TestCase): """ @@ -723,51 +773,6 @@ def f(): self.assertIs(catcher.unraisable.exc_type, TypeError) self.assertIsNone(weak()) -@unittest.skipIf(_testcapi is None, 'need _testcapi') -class TestCAPI(unittest.TestCase): - def getframe(self): - return sys._getframe() - - def test_frame_getters(self): - frame = self.getframe() - self.assertEqual(frame.f_locals, _testcapi.frame_getlocals(frame)) - self.assertIs(frame.f_globals, _testcapi.frame_getglobals(frame)) - self.assertIs(frame.f_builtins, _testcapi.frame_getbuiltins(frame)) - self.assertEqual(frame.f_lasti, _testcapi.frame_getlasti(frame)) - - def test_getvar(self): - current_frame = sys._getframe() - x = 1 - self.assertEqual(_testcapi.frame_getvar(current_frame, "x"), 1) - self.assertEqual(_testcapi.frame_getvarstring(current_frame, b"x"), 1) - with self.assertRaises(NameError): - _testcapi.frame_getvar(current_frame, "y") - with self.assertRaises(NameError): - _testcapi.frame_getvarstring(current_frame, b"y") - - # wrong name type - with self.assertRaises(TypeError): - _testcapi.frame_getvar(current_frame, b'x') - with self.assertRaises(TypeError): - _testcapi.frame_getvar(current_frame, 123) - - def getgenframe(self): - yield sys._getframe() - - def test_frame_get_generator(self): - gen = self.getgenframe() - frame = next(gen) - self.assertIs(gen,
_testcapi.frame_getgenerator(frame)) - - def test_frame_fback_api(self): - """Test that accessing `f_back` does not cause a segmentation fault on - a frame created with `PyFrame_New` (GH-99110).""" - def dummy(): - pass - - frame = _testcapi.frame_new(dummy.__code__, globals(), locals()) - # The following line should not cause a segmentation fault. - self.assertIsNone(frame.f_back) if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_free_threading/test_dict.py b/Lib/test/test_free_threading/test_dict.py index 13717cb39fa35d..4f605e0c51f0d5 100644 --- a/Lib/test/test_free_threading/test_dict.py +++ b/Lib/test/test_free_threading/test_dict.py @@ -5,7 +5,7 @@ from ast import Or from functools import partial -from threading import Thread +from threading import Barrier, Thread from unittest import TestCase try: @@ -142,6 +142,27 @@ def writer_func(l): for ref in thread_list: self.assertIsNone(ref()) + def test_racing_get_set_dict(self): + """Races getting and setting a dict should be thread safe""" + THREAD_COUNT = 10 + barrier = Barrier(THREAD_COUNT) + def work(d): + barrier.wait() + for _ in range(1000): + d[10] = 0 + d.get(10, None) + _ = d[10] + + d = {} + worker_threads = [] + for ii in range(THREAD_COUNT): + worker_threads.append(Thread(target=work, args=[d])) + for t in worker_threads: + t.start() + for t in worker_threads: + t.join() + + def test_racing_set_object_dict(self): """Races assigning to __dict__ should be thread safe""" class C: pass diff --git a/Lib/test/test_free_threading/test_races.py b/Lib/test/test_free_threading/test_races.py index 69982558a067a5..85aa69c8cd494f 100644 --- a/Lib/test/test_free_threading/test_races.py +++ b/Lib/test/test_free_threading/test_races.py @@ -270,6 +270,21 @@ def mutate(): do_race(set_value, mutate) + def test_racing_recursion_limit(self): + def something_recursive(): + def count(n): + if n > 0: + return count(n - 1) + 1 + return 0 + + count(50) + + def set_recursion_limit(): + for limit in range(100, 200): + sys.setrecursionlimit(limit) + + do_race(something_recursive, set_recursion_limit) + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_fstring.py b/Lib/test/test_fstring.py index c359f2ecce01f1..1d96b7a2c2459b 100644 --- a/Lib/test/test_fstring.py +++ b/Lib/test/test_fstring.py @@ -1649,6 +1649,14 @@ def __repr__(self): #self.assertEqual(f'X{x =}Y', 'Xx\t='+repr(x)+'Y') #self.assertEqual(f'X{x = }Y', 'Xx\t=\t'+repr(x)+'Y') + def test_debug_expressions_are_raw_strings(self): + + self.assertEqual(f'{b"\N{OX}"=}', 'b"\\N{OX}"=b\'\\\\N{OX}\'') + self.assertEqual(f'{r"\xff"=}', 'r"\\xff"=\'\\\\xff\'') + self.assertEqual(f'{r"\n"=}', 'r"\\n"=\'\\\\n\'') + self.assertEqual(f"{'\''=}", "'\\''=\"'\"") + self.assertEqual(f'{'\xc5'=}', r"'\xc5'='Å'") + def test_walrus(self): x = 20 # This isn't an assignment expression, it's 'x', with a format @@ -1757,5 +1765,23 @@ def get_code(s): for s in ["", "some string"]: self.assertEqual(get_code(f"'{s}'"), get_code(f"f'{s}'")) + def test_gh129093(self): + self.assertEqual(f'{1==2=}', '1==2=False') + self.assertEqual(f'{1 == 2=}', '1 == 2=False') + self.assertEqual(f'{1!=2=}', '1!=2=True') + self.assertEqual(f'{1 != 2=}', '1 != 2=True') + + self.assertEqual(f'{(1) != 2=}', '(1) != 2=True') + self.assertEqual(f'{(1*2) != (3)=}', '(1*2) != (3)=True') + + self.assertEqual(f'{1 != 2 == 3 != 4=}', '1 != 2 == 3 != 4=False') + self.assertEqual(f'{1 == 2 != 3 == 4=}', '1 == 2 != 3 == 4=False') + + self.assertEqual(f'{f'{1==2=}'=}', "f'{1==2=}'='1==2=False'") +
self.assertEqual(f'{f'{1 == 2=}'=}', "f'{1 == 2=}'='1 == 2=False'") + self.assertEqual(f'{f'{1!=2=}'=}', "f'{1!=2=}'='1!=2=True'") + self.assertEqual(f'{f'{1 != 2=}'=}', "f'{1 != 2=}'='1 != 2=True'") + + if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_functools.py b/Lib/test/test_functools.py index 32224866082824..4beb4380c3ad6b 100644 --- a/Lib/test/test_functools.py +++ b/Lib/test/test_functools.py @@ -645,11 +645,11 @@ def test_bound_method_introspection(self): def test_unbound_method_retrieval(self): obj = self.A - self.assertFalse(hasattr(obj.both, "__self__")) - self.assertFalse(hasattr(obj.nested, "__self__")) - self.assertFalse(hasattr(obj.over_partial, "__self__")) - self.assertFalse(hasattr(obj.static, "__self__")) - self.assertFalse(hasattr(self.a.static, "__self__")) + self.assertNotHasAttr(obj.both, "__self__") + self.assertNotHasAttr(obj.nested, "__self__") + self.assertNotHasAttr(obj.over_partial, "__self__") + self.assertNotHasAttr(obj.static, "__self__") + self.assertNotHasAttr(self.a.static, "__self__") def test_descriptors(self): for obj in [self.A, self.a]: @@ -791,7 +791,7 @@ def wrapper(): self.assertNotEqual(wrapper.__qualname__, f.__qualname__) self.assertEqual(wrapper.__doc__, None) self.assertEqual(wrapper.__annotations__, {}) - self.assertFalse(hasattr(wrapper, 'attr')) + self.assertNotHasAttr(wrapper, 'attr') def test_selective_update(self): def f(): @@ -840,7 +840,7 @@ def wrapper(): pass functools.update_wrapper(wrapper, max) self.assertEqual(wrapper.__name__, 'max') - self.assertTrue(wrapper.__doc__.startswith('max(')) + self.assertStartsWith(wrapper.__doc__, 'max(') self.assertEqual(wrapper.__annotations__, {}) def test_update_type_wrapper(self): @@ -910,7 +910,7 @@ def wrapper(): self.assertEqual(wrapper.__name__, 'wrapper') self.assertNotEqual(wrapper.__qualname__, f.__qualname__) self.assertEqual(wrapper.__doc__, None) - self.assertFalse(hasattr(wrapper, 'attr')) + self.assertNotHasAttr(wrapper, 'attr') def test_selective_update(self): def f(): @@ -2666,15 +2666,15 @@ def _(self, arg): a.t(0) self.assertEqual(a.arg, "int") aa = A() - self.assertFalse(hasattr(aa, 'arg')) + self.assertNotHasAttr(aa, 'arg') a.t('') self.assertEqual(a.arg, "str") aa = A() - self.assertFalse(hasattr(aa, 'arg')) + self.assertNotHasAttr(aa, 'arg') a.t(0.0) self.assertEqual(a.arg, "base") aa = A() - self.assertFalse(hasattr(aa, 'arg')) + self.assertNotHasAttr(aa, 'arg') def test_staticmethod_register(self): class A: @@ -3036,16 +3036,16 @@ def i(arg): @i.register(42) def _(arg): return "I annotated with a non-type" - self.assertTrue(str(exc.exception).startswith(msg_prefix + "42")) - self.assertTrue(str(exc.exception).endswith(msg_suffix)) + self.assertStartsWith(str(exc.exception), msg_prefix + "42") + self.assertEndsWith(str(exc.exception), msg_suffix) with self.assertRaises(TypeError) as exc: @i.register def _(arg): return "I forgot to annotate" - self.assertTrue(str(exc.exception).startswith(msg_prefix + + self.assertStartsWith(str(exc.exception), msg_prefix + "._" - )) - self.assertTrue(str(exc.exception).endswith(msg_suffix)) + ) + self.assertEndsWith(str(exc.exception), msg_suffix) with self.assertRaises(TypeError) as exc: @i.register @@ -3055,23 +3055,23 @@ def _(arg: typing.Iterable[str]): # types from `typing`. Instead, annotate with regular types # or ABCs. return "I annotated with a generic collection" - self.assertTrue(str(exc.exception).startswith( + self.assertStartsWith(str(exc.exception), "Invalid annotation for 'arg'." 
- )) - self.assertTrue(str(exc.exception).endswith( + ) + self.assertEndsWith(str(exc.exception), 'typing.Iterable[str] is not a class.' - )) + ) with self.assertRaises(TypeError) as exc: @i.register def _(arg: typing.Union[int, typing.Iterable[str]]): return "Invalid Union" - self.assertTrue(str(exc.exception).startswith( + self.assertStartsWith(str(exc.exception), "Invalid annotation for 'arg'." - )) - self.assertTrue(str(exc.exception).endswith( + ) + self.assertEndsWith(str(exc.exception), 'typing.Union[int, typing.Iterable[str]] not all arguments are classes.' - )) + ) def test_invalid_positional_argument(self): @functools.singledispatch diff --git a/Lib/test/test_gdb/util.py b/Lib/test/test_gdb/util.py index 8fe9cfc543395e..8097fd52ababe6 100644 --- a/Lib/test/test_gdb/util.py +++ b/Lib/test/test_gdb/util.py @@ -280,11 +280,6 @@ def get_stack_trace(self, source=None, script=None, return out - def assertEndsWith(self, actual, exp_end): - '''Ensure that the given "actual" string ends with "exp_end"''' - self.assertTrue(actual.endswith(exp_end), - msg='%r did not end with %r' % (actual, exp_end)) - def assertMultilineMatches(self, actual, pattern): m = re.match(pattern, actual, re.DOTALL) if not m: diff --git a/Lib/test/test_generated_cases.py b/Lib/test/test_generated_cases.py index 9c65e81dfe4be1..35600ce5486642 100644 --- a/Lib/test/test_generated_cases.py +++ b/Lib/test/test_generated_cases.py @@ -281,12 +281,12 @@ def run_cases_test(self, input: str, expected: str): ) with open(self.temp_output_filename) as temp_output: - lines = temp_output.readlines() - while lines and lines[0].startswith(("// ", "#", " #", "\n")): - lines.pop(0) - while lines and lines[-1].startswith(("#", "\n")): - lines.pop(-1) - actual = "".join(lines) + lines = temp_output.read() + _, rest = lines.split(tier1_generator.INSTRUCTION_START_MARKER) + instructions, labels_with_prelude_and_postlude = rest.split(tier1_generator.INSTRUCTION_END_MARKER) + _, labels_with_postlude = labels_with_prelude_and_postlude.split(tier1_generator.LABEL_START_MARKER) + labels, _ = labels_with_postlude.split(tier1_generator.LABEL_END_MARKER) + actual = instructions + labels # if actual.strip() != expected.strip(): # print("Actual:") # print(actual) @@ -445,7 +445,7 @@ def test_predictions(self): frame->instr_ptr = next_instr; next_instr += 1; INSTRUCTION_STATS(OP1); - PREDICTED(OP1); + PREDICTED_OP1:; _PyStackRef res; res = Py_None; stack_pointer[-1] = res; @@ -538,7 +538,9 @@ def test_error_if_plain(self): frame->instr_ptr = next_instr; next_instr += 1; INSTRUCTION_STATS(OP); - if (cond) goto label; + if (cond) { + goto label; + } DISPATCH(); } """ @@ -555,7 +557,9 @@ def test_error_if_plain_with_comment(self): frame->instr_ptr = next_instr; next_instr += 1; INSTRUCTION_STATS(OP); - if (cond) goto label; + if (cond) { + goto label; + } // Comment is ok DISPATCH(); } @@ -582,7 +586,9 @@ def test_error_if_pop(self): right = stack_pointer[-1]; left = stack_pointer[-2]; SPAM(left, right); - if (cond) goto pop_2_label; + if (cond) { + goto pop_2_label; + } res = 0; stack_pointer[-2] = res; stack_pointer += -1; @@ -611,7 +617,9 @@ def test_error_if_pop_with_result(self): right = stack_pointer[-1]; left = stack_pointer[-2]; res = SPAM(left, right); - if (cond) goto pop_2_label; + if (cond) { + goto pop_2_label; + } stack_pointer[-2] = res; stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); @@ -679,7 +687,7 @@ def test_macro_instruction(self): frame->instr_ptr = next_instr; next_instr += 6; INSTRUCTION_STATS(OP); - PREDICTED(OP); + 
PREDICTED_OP:; _Py_CODEUNIT* const this_instr = next_instr - 6; (void)this_instr; _PyStackRef left; @@ -1392,7 +1400,9 @@ def test_pop_on_error_peeks(self): // THIRD { // Mark j and k as used - if (cond) goto pop_2_error; + if (cond) { + goto pop_2_error; + } } stack_pointer += -2; assert(WITHIN_STACK_BOUNDS()); @@ -1639,12 +1649,16 @@ def test_escaping_call_next_to_cmacro(self): """ self.run_cases_test(input, output) - def test_pop_dead_inputs_all_live(self): + def test_pystackref_frompyobject_new_next_to_cmacro(self): input = """ - inst(OP, (a, b --)) { - POP_DEAD_INPUTS(); - HAM(a, b); - INPUTS_DEAD(); + inst(OP, (-- out1, out2)) { + PyObject *obj = SPAM(); + #ifdef Py_GIL_DISABLED + out1 = PyStackRef_FromPyObjectNew(obj); + #else + out1 = PyStackRef_FromPyObjectNew(obj); + #endif + out2 = PyStackRef_FromPyObjectNew(obj); } """ output = """ @@ -1652,22 +1666,28 @@ def test_pop_dead_inputs_all_live(self): frame->instr_ptr = next_instr; next_instr += 1; INSTRUCTION_STATS(OP); - _PyStackRef a; - _PyStackRef b; - b = stack_pointer[-1]; - a = stack_pointer[-2]; - HAM(a, b); - stack_pointer += -2; + _PyStackRef out1; + _PyStackRef out2; + PyObject *obj = SPAM(); + #ifdef Py_GIL_DISABLED + out1 = PyStackRef_FromPyObjectNew(obj); + #else + out1 = PyStackRef_FromPyObjectNew(obj); + #endif + out2 = PyStackRef_FromPyObjectNew(obj); + stack_pointer[0] = out1; + stack_pointer[1] = out2; + stack_pointer += 2; assert(WITHIN_STACK_BOUNDS()); DISPATCH(); } """ self.run_cases_test(input, output) - def test_pop_dead_inputs_some_live(self): + def test_pop_input(self): input = """ - inst(OP, (a, b, c --)) { - POP_DEAD_INPUTS(); + inst(OP, (a, b --)) { + POP_INPUT(b); HAM(a); INPUTS_DEAD(); } @@ -1678,8 +1698,10 @@ def test_pop_dead_inputs_some_live(self): next_instr += 1; INSTRUCTION_STATS(OP); _PyStackRef a; - a = stack_pointer[-3]; - stack_pointer += -2; + _PyStackRef b; + b = stack_pointer[-1]; + a = stack_pointer[-2]; + stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); HAM(a); stack_pointer += -1; @@ -1689,30 +1711,116 @@ def test_pop_dead_inputs_some_live(self): """ self.run_cases_test(input, output) - def test_pop_dead_inputs_with_output(self): + def test_pop_input_with_empty_stack(self): + input = """ + inst(OP, (--)) { + POP_INPUT(foo); + } + """ + with self.assertRaises(SyntaxError): + self.run_cases_test(input, "") + + def test_pop_input_with_non_tos(self): + input = """ + inst(OP, (a, b --)) { + POP_INPUT(a); + } + """ + with self.assertRaises(SyntaxError): + self.run_cases_test(input, "") + + def test_no_escaping_calls_in_branching_macros(self): + + input = """ + inst(OP, ( -- )) { + DEOPT_IF(escaping_call()); + } + """ + with self.assertRaises(SyntaxError): + self.run_cases_test(input, "") + + input = """ + inst(OP, ( -- )) { + EXIT_IF(escaping_call()); + } + """ + with self.assertRaises(SyntaxError): + self.run_cases_test(input, "") + + input = """ + inst(OP, ( -- )) { + ERROR_IF(escaping_call(), error); + } + """ + with self.assertRaises(SyntaxError): + self.run_cases_test(input, "") + + def test_kill_in_wrong_order(self): input = """ inst(OP, (a, b -- c)) { - POP_DEAD_INPUTS(); - c = SPAM(); + c = b; + PyStackRef_CLOSE(a); + PyStackRef_CLOSE(b); + } + """ + with self.assertRaises(SyntaxError): + self.run_cases_test(input, "") + + def test_complex_label(self): + input = """ + label(my_label) { + // Comment + do_thing() + if (complex) { + goto other_label; + } + goto other_label2; } """ + output = """ - TARGET(OP) { - frame->instr_ptr = next_instr; - next_instr += 1; - INSTRUCTION_STATS(OP); 
- _PyStackRef c; - stack_pointer += -2; - assert(WITHIN_STACK_BOUNDS()); - c = SPAM(); - stack_pointer[0] = c; - stack_pointer += 1; - assert(WITHIN_STACK_BOUNDS()); - DISPATCH(); + my_label: + { + // Comment + do_thing() + if (complex) { + goto other_label; + } + goto other_label2; } """ self.run_cases_test(input, output) + def test_multiple_labels(self): + input = """ + label(my_label_1) { + // Comment + do_thing1(); + goto my_label_2; + } + + label(my_label_2) { + // Comment + do_thing2(); + goto my_label_3; + } + """ + + output = """ + my_label_1: + { + // Comment + do_thing1(); + goto my_label_2; + } + + my_label_2: + { + // Comment + do_thing2(); + goto my_label_3; + } + """ class TestGeneratedAbstractCases(unittest.TestCase): def setUp(self) -> None: @@ -1799,8 +1907,8 @@ def test_overridden_abstract_args(self): """ output = """ case OP: { - _Py_UopsSymbol *arg1; - _Py_UopsSymbol *out; + JitOptSymbol *arg1; + JitOptSymbol *out; arg1 = stack_pointer[-1]; out = EGGS(arg1); stack_pointer[-1] = out; @@ -1808,7 +1916,7 @@ def test_overridden_abstract_args(self): } case OP2: { - _Py_UopsSymbol *out; + JitOptSymbol *out; out = sym_new_not_null(ctx); stack_pointer[-1] = out; break; @@ -1833,14 +1941,14 @@ def test_no_overridden_case(self): """ output = """ case OP: { - _Py_UopsSymbol *out; + JitOptSymbol *out; out = sym_new_not_null(ctx); stack_pointer[-1] = out; break; } case OP2: { - _Py_UopsSymbol *out; + JitOptSymbol *out; out = NULL; stack_pointer[-1] = out; break; diff --git a/Lib/test/test_generators.py b/Lib/test/test_generators.py index 2ea6dba12effc1..b6985054c33d10 100644 --- a/Lib/test/test_generators.py +++ b/Lib/test/test_generators.py @@ -652,6 +652,89 @@ def genfn(): self.assertIsNone(f_wr()) +# See https://github.com/python/cpython/issues/125723 +class GeneratorDeallocTest(unittest.TestCase): + def test_frame_outlives_generator(self): + def g1(): + a = 42 + yield sys._getframe() + + def g2(): + a = 42 + yield + + def g3(obj): + a = 42 + obj.frame = sys._getframe() + yield + + class ObjectWithFrame(): + def __init__(self): + self.frame = None + + def get_frame(index): + if index == 1: + return next(g1()) + elif index == 2: + gen = g2() + next(gen) + return gen.gi_frame + elif index == 3: + obj = ObjectWithFrame() + next(g3(obj)) + return obj.frame + else: + return None + + for index in (1, 2, 3): + with self.subTest(index=index): + frame = get_frame(index) + frame_locals = frame.f_locals + self.assertIn('a', frame_locals) + self.assertEqual(frame_locals['a'], 42) + + def test_frame_locals_outlive_generator(self): + frame_locals1 = None + + def g1(): + nonlocal frame_locals1 + frame_locals1 = sys._getframe().f_locals + a = 42 + yield + + def g2(): + a = 42 + yield sys._getframe().f_locals + + def get_frame_locals(index): + if index == 1: + nonlocal frame_locals1 + next(g1()) + return frame_locals1 + if index == 2: + return next(g2()) + else: + return None + + for index in (1, 2): + with self.subTest(index=index): + frame_locals = get_frame_locals(index) + self.assertIn('a', frame_locals) + self.assertEqual(frame_locals['a'], 42) + + def test_frame_locals_outlive_generator_with_exec(self): + def g(): + a = 42 + yield locals(), sys._getframe().f_locals + + locals_ = {'g': g} + for i in range(10): + exec("snapshot, live_locals = next(g())", locals=locals_) + for l in (locals_['snapshot'], locals_['live_locals']): + self.assertIn('a', l) + self.assertEqual(l['a'], 42) + + class GeneratorThrowTest(unittest.TestCase): def test_exception_context_with_yield(self): diff --git 
a/Lib/test/test_genericpath.py b/Lib/test/test_genericpath.py index 6d2593cb4cf228..391158b855624d 100644 --- a/Lib/test/test_genericpath.py +++ b/Lib/test/test_genericpath.py @@ -161,7 +161,7 @@ def test_exists(self): self.assertIs(self.pathmodule.lexists(path=filename), True) @unittest.skipUnless(hasattr(os, "pipe"), "requires os.pipe()") - @unittest.skipIf(is_emscripten, "Emscripten pipe fds have no stat") + @unittest.skipIf(is_emscripten, "Fixed in next Emscripten release after 4.0.1") def test_exists_fd(self): r, w = os.pipe() try: diff --git a/Lib/test/test_glob.py b/Lib/test/test_glob.py index 00187a3fb3537d..1a836e34e8712f 100644 --- a/Lib/test/test_glob.py +++ b/Lib/test/test_glob.py @@ -511,6 +511,10 @@ def fn(pat): @skip_unless_symlink class SymlinkLoopGlobTests(unittest.TestCase): + # gh-109959: On Linux, glob._isdir() and glob._lexists() can return False + # randomly when checking the "link/" symbolic link. + # https://github.com/python/cpython/issues/109959#issuecomment-2577550700 + @unittest.skip("flaky test") def test_selflink(self): tempdir = TESTFN + "_dir" os.makedirs(tempdir) diff --git a/Lib/test/test_http_cookies.py b/Lib/test/test_http_cookies.py index 7b3dc0fdaedc3b..d945de23493f20 100644 --- a/Lib/test/test_http_cookies.py +++ b/Lib/test/test_http_cookies.py @@ -205,6 +205,14 @@ def test_set_secure_httponly_attrs(self): self.assertEqual(C.output(), 'Set-Cookie: Customer="WILE_E_COYOTE"; HttpOnly; Secure') + def test_set_secure_httponly_partitioned_attrs(self): + C = cookies.SimpleCookie('Customer="WILE_E_COYOTE"') + C['Customer']['secure'] = True + C['Customer']['httponly'] = True + C['Customer']['partitioned'] = True + self.assertEqual(C.output(), + 'Set-Cookie: Customer="WILE_E_COYOTE"; HttpOnly; Partitioned; Secure') + def test_samesite_attrs(self): samesite_values = ['Strict', 'Lax', 'strict', 'lax'] for val in samesite_values: diff --git a/Lib/test/test_httplib.py b/Lib/test/test_httplib.py index 7a7ec555a2dbbb..75b748aee05940 100644 --- a/Lib/test/test_httplib.py +++ b/Lib/test/test_httplib.py @@ -1092,6 +1092,25 @@ def test_chunked(self): self.assertEqual(resp.read(), expected) resp.close() + # Explicit full read + for n in (-123, -1, None): + with self.subTest('full read', n=n): + sock = FakeSocket(chunked_start + last_chunk + chunked_end) + resp = client.HTTPResponse(sock, method="GET") + resp.begin() + self.assertTrue(resp.chunked) + self.assertEqual(resp.read(n), expected) + resp.close() + + # Read first chunk + with self.subTest('read1(-1)'): + sock = FakeSocket(chunked_start + last_chunk + chunked_end) + resp = client.HTTPResponse(sock, method="GET") + resp.begin() + self.assertTrue(resp.chunked) + self.assertEqual(resp.read1(-1), b"hello worl") + resp.close() + # Various read sizes for n in range(1, 12): sock = FakeSocket(chunked_start + last_chunk + chunked_end) diff --git a/Lib/test/test_imaplib.py b/Lib/test/test_imaplib.py index a6509fc3ba0eae..2fbf83b264d5b4 100644 --- a/Lib/test/test_imaplib.py +++ b/Lib/test/test_imaplib.py @@ -901,6 +901,20 @@ def handle(self): self.assertRaises(imaplib.IMAP4.error, self.imap_class, *server.server_address) + def test_truncated_large_literal(self): + size = 0 + class BadHandler(SimpleIMAPHandler): + def handle(self): + self._send_textline('* OK {%d}' % size) + self._send_textline('IMAP4rev1') + + for exponent in range(15, 64): + size = 1 << exponent + with self.subTest(f"size=2e{size}"): + with self.reaped_server(BadHandler) as server: + with self.assertRaises(imaplib.IMAP4.abort): + 
self.imap_class(*server.server_address) + @threading_helper.reap_threads def test_simple_with_statement(self): # simplest call diff --git a/Lib/test/test_import/__init__.py b/Lib/test/test_import/__init__.py index c2cec6444cb43a..207b7ae7517450 100644 --- a/Lib/test/test_import/__init__.py +++ b/Lib/test/test_import/__init__.py @@ -29,9 +29,21 @@ from test.support import os_helper from test.support import ( - STDLIB_DIR, swap_attr, swap_item, cpython_only, is_apple_mobile, is_emscripten, - is_wasi, run_in_subinterp, run_in_subinterp_with_config, Py_TRACE_REFS, - requires_gil_enabled, Py_GIL_DISABLED, no_rerun) + STDLIB_DIR, + swap_attr, + swap_item, + cpython_only, + is_apple_mobile, + is_emscripten, + is_wasi, + run_in_subinterp, + run_in_subinterp_with_config, + Py_TRACE_REFS, + requires_gil_enabled, + Py_GIL_DISABLED, + no_rerun, + force_not_colorized_test_class, +) from test.support.import_helper import ( forget, make_legacy_pyc, unlink, unload, ready_to_import, DirsOnSysPath, CleanImport, import_module) @@ -333,6 +345,7 @@ def _from_subinterp(cls, name, interpid, pipe, script_kwargs): return cls.parse(text.decode()) +@force_not_colorized_test_class class ImportTests(unittest.TestCase): def setUp(self): @@ -539,7 +552,7 @@ def test_import_name_binding(self): import test as x import test.support self.assertIs(x, test, x.__name__) - self.assertTrue(hasattr(test.support, "__file__")) + self.assertHasAttr(test.support, "__file__") # import x.y.z as w binds z as w import test.support as y @@ -610,7 +623,7 @@ def test_file_to_source(self): sys.path.insert(0, os.curdir) try: mod = __import__(TESTFN) - self.assertTrue(mod.__file__.endswith('.py')) + self.assertEndsWith(mod.__file__, '.py') os.remove(source) del sys.modules[TESTFN] make_legacy_pyc(source) @@ -1443,7 +1456,7 @@ def test_UNC_path(self): self.fail("could not import 'test_unc_path' from %r: %r" % (unc, e)) self.assertEqual(mod.testdata, 'test_unc_path') - self.assertTrue(mod.__file__.startswith(unc), mod.__file__) + self.assertStartsWith(mod.__file__, unc) unload("test_unc_path") @@ -1456,7 +1469,7 @@ def tearDown(self): def test_relimport_star(self): # This will import * from .test_import. from .. 
import relimport - self.assertTrue(hasattr(relimport, "RelativeImportTests")) + self.assertHasAttr(relimport, "RelativeImportTests") def test_issue3221(self): # Note for mergers: the 'absolute' tests from the 2.x branch @@ -1786,7 +1799,7 @@ def test_frozen_importlib_is_bootstrap(self): self.assertIs(mod, _bootstrap) self.assertEqual(mod.__name__, 'importlib._bootstrap') self.assertEqual(mod.__package__, 'importlib') - self.assertTrue(mod.__file__.endswith('_bootstrap.py'), mod.__file__) + self.assertEndsWith(mod.__file__, '_bootstrap.py') def test_frozen_importlib_external_is_bootstrap_external(self): from importlib import _bootstrap_external @@ -1794,7 +1807,7 @@ def test_frozen_importlib_external_is_bootstrap_external(self): self.assertIs(mod, _bootstrap_external) self.assertEqual(mod.__name__, 'importlib._bootstrap_external') self.assertEqual(mod.__package__, 'importlib') - self.assertTrue(mod.__file__.endswith('_bootstrap_external.py'), mod.__file__) + self.assertEndsWith(mod.__file__, '_bootstrap_external.py') def test_there_can_be_only_one(self): # Issue #15386 revealed a tricky loophole in the bootstrapping @@ -2800,7 +2813,7 @@ def check_common(self, loaded): self.assertEqual(mod.__file__, self.FILE) self.assertEqual(mod.__spec__.origin, self.ORIGIN) if not isolated: - self.assertTrue(issubclass(mod.error, Exception)) + self.assertIsSubclass(mod.error, Exception) self.assertEqual(mod.int_const, 1969) self.assertEqual(mod.str_const, 'something different') self.assertIsInstance(mod._module_initialized, float) @@ -3311,30 +3324,6 @@ def test_basic_multiple_interpreters_reset_each(self): # * module's global state was initialized, not reset -@cpython_only -class CAPITests(unittest.TestCase): - def test_pyimport_addmodule(self): - # gh-105922: Test PyImport_AddModuleRef(), PyImport_AddModule() - # and PyImport_AddModuleObject() - _testcapi = import_module("_testcapi") - for name in ( - 'sys', # frozen module - 'test', # package - __name__, # package.module - ): - _testcapi.check_pyimport_addmodule(name) - - def test_pyimport_addmodule_create(self): - # gh-105922: Test PyImport_AddModuleRef(), create a new module - _testcapi = import_module("_testcapi") - name = 'dontexist' - self.assertNotIn(name, sys.modules) - self.addCleanup(unload, name) - - mod = _testcapi.check_pyimport_addmodule(name) - self.assertIs(mod, sys.modules[name]) - - @cpython_only class TestMagicNumber(unittest.TestCase): def test_magic_number_endianness(self): diff --git a/Lib/test/test_importlib/extension/test_path_hook.py b/Lib/test/test_importlib/extension/test_path_hook.py index 314a635c77e082..941dcd5432ce46 100644 --- a/Lib/test/test_importlib/extension/test_path_hook.py +++ b/Lib/test/test_importlib/extension/test_path_hook.py @@ -21,7 +21,7 @@ def hook(self, entry): def test_success(self): # Path hook should handle a directory where a known extension module # exists. 
- self.assertTrue(hasattr(self.hook(util.EXTENSIONS.path), 'find_spec')) + self.assertHasAttr(self.hook(util.EXTENSIONS.path), 'find_spec') (Frozen_PathHooksTests, diff --git a/Lib/test/test_importlib/frozen/test_loader.py b/Lib/test/test_importlib/frozen/test_loader.py index 1112c0664ad477..c808bb73291a7c 100644 --- a/Lib/test/test_importlib/frozen/test_loader.py +++ b/Lib/test/test_importlib/frozen/test_loader.py @@ -61,7 +61,7 @@ def exec_module(self, name, origname=None): module.main() self.assertTrue(module.initialized) - self.assertTrue(hasattr(module, '__spec__')) + self.assertHasAttr(module, '__spec__') self.assertEqual(module.__spec__.origin, 'frozen') return module, stdout.getvalue() @@ -72,7 +72,7 @@ def test_module(self): for attr, value in check.items(): self.assertEqual(getattr(module, attr), value) self.assertEqual(output, 'Hello world!\n') - self.assertTrue(hasattr(module, '__spec__')) + self.assertHasAttr(module, '__spec__') self.assertEqual(module.__spec__.loader_state.origname, name) def test_package(self): @@ -136,7 +136,7 @@ def test_get_code(self): exec(code, mod.__dict__) with captured_stdout() as stdout: mod.main() - self.assertTrue(hasattr(mod, 'initialized')) + self.assertHasAttr(mod, 'initialized') self.assertEqual(stdout.getvalue(), 'Hello world!\n') def test_get_source(self): diff --git a/Lib/test/test_importlib/import_/test_caching.py b/Lib/test/test_importlib/import_/test_caching.py index aedf0fd4f9db02..718e7d041b0860 100644 --- a/Lib/test/test_importlib/import_/test_caching.py +++ b/Lib/test/test_importlib/import_/test_caching.py @@ -78,7 +78,7 @@ def test_using_cache_for_assigning_to_attribute(self): with self.create_mock('pkg.__init__', 'pkg.module') as importer: with util.import_state(meta_path=[importer]): module = self.__import__('pkg.module') - self.assertTrue(hasattr(module, 'module')) + self.assertHasAttr(module, 'module') self.assertEqual(id(module.module), id(sys.modules['pkg.module'])) @@ -88,7 +88,7 @@ def test_using_cache_for_fromlist(self): with self.create_mock('pkg.__init__', 'pkg.module') as importer: with util.import_state(meta_path=[importer]): module = self.__import__('pkg', fromlist=['module']) - self.assertTrue(hasattr(module, 'module')) + self.assertHasAttr(module, 'module') self.assertEqual(id(module.module), id(sys.modules['pkg.module'])) diff --git a/Lib/test/test_importlib/import_/test_fromlist.py b/Lib/test/test_importlib/import_/test_fromlist.py index 4b4b9bc3f5e04a..feccc7be09a98c 100644 --- a/Lib/test/test_importlib/import_/test_fromlist.py +++ b/Lib/test/test_importlib/import_/test_fromlist.py @@ -63,7 +63,7 @@ def test_nonexistent_object(self): with util.import_state(meta_path=[importer]): module = self.__import__('module', fromlist=['non_existent']) self.assertEqual(module.__name__, 'module') - self.assertFalse(hasattr(module, 'non_existent')) + self.assertNotHasAttr(module, 'non_existent') def test_module_from_package(self): # [module] @@ -71,7 +71,7 @@ def test_module_from_package(self): with util.import_state(meta_path=[importer]): module = self.__import__('pkg', fromlist=['module']) self.assertEqual(module.__name__, 'pkg') - self.assertTrue(hasattr(module, 'module')) + self.assertHasAttr(module, 'module') self.assertEqual(module.module.__name__, 'pkg.module') def test_nonexistent_from_package(self): @@ -79,7 +79,7 @@ def test_nonexistent_from_package(self): with util.import_state(meta_path=[importer]): module = self.__import__('pkg', fromlist=['non_existent']) self.assertEqual(module.__name__, 'pkg') - 
self.assertFalse(hasattr(module, 'non_existent')) + self.assertNotHasAttr(module, 'non_existent') def test_module_from_package_triggers_ModuleNotFoundError(self): # If a submodule causes an ModuleNotFoundError because it tries @@ -107,7 +107,7 @@ def basic_star_test(self, fromlist=['*']): mock['pkg'].__all__ = ['module'] module = self.__import__('pkg', fromlist=fromlist) self.assertEqual(module.__name__, 'pkg') - self.assertTrue(hasattr(module, 'module')) + self.assertHasAttr(module, 'module') self.assertEqual(module.module.__name__, 'pkg.module') def test_using_star(self): @@ -125,8 +125,8 @@ def test_star_with_others(self): mock['pkg'].__all__ = ['module1'] module = self.__import__('pkg', fromlist=['module2', '*']) self.assertEqual(module.__name__, 'pkg') - self.assertTrue(hasattr(module, 'module1')) - self.assertTrue(hasattr(module, 'module2')) + self.assertHasAttr(module, 'module1') + self.assertHasAttr(module, 'module2') self.assertEqual(module.module1.__name__, 'pkg.module1') self.assertEqual(module.module2.__name__, 'pkg.module2') @@ -136,7 +136,7 @@ def test_nonexistent_in_all(self): importer['pkg'].__all__ = ['non_existent'] module = self.__import__('pkg', fromlist=['*']) self.assertEqual(module.__name__, 'pkg') - self.assertFalse(hasattr(module, 'non_existent')) + self.assertNotHasAttr(module, 'non_existent') def test_star_in_all(self): with util.mock_spec('pkg.__init__') as importer: @@ -144,7 +144,7 @@ def test_star_in_all(self): importer['pkg'].__all__ = ['*'] module = self.__import__('pkg', fromlist=['*']) self.assertEqual(module.__name__, 'pkg') - self.assertFalse(hasattr(module, '*')) + self.assertNotHasAttr(module, '*') def test_invalid_type(self): with util.mock_spec('pkg.__init__') as importer: diff --git a/Lib/test/test_importlib/import_/test_meta_path.py b/Lib/test/test_importlib/import_/test_meta_path.py index 8689017ba43112..4c00f60681acf1 100644 --- a/Lib/test/test_importlib/import_/test_meta_path.py +++ b/Lib/test/test_importlib/import_/test_meta_path.py @@ -43,7 +43,7 @@ def test_empty(self): self.assertIsNone(importlib._bootstrap._find_spec('nothing', None)) self.assertEqual(len(w), 1) - self.assertTrue(issubclass(w[-1].category, ImportWarning)) + self.assertIsSubclass(w[-1].category, ImportWarning) (Frozen_CallingOrder, diff --git a/Lib/test/test_importlib/import_/test_path.py b/Lib/test/test_importlib/import_/test_path.py index 89b52fbd1e1aff..51ff6115e1281e 100644 --- a/Lib/test/test_importlib/import_/test_path.py +++ b/Lib/test/test_importlib/import_/test_path.py @@ -1,3 +1,4 @@ +from test.support import os_helper from test.test_importlib import util importlib = util.import_importlib('importlib') @@ -80,7 +81,7 @@ def test_empty_path_hooks(self): self.assertIsNone(self.find('os')) self.assertIsNone(sys.path_importer_cache[path_entry]) self.assertEqual(len(w), 1) - self.assertTrue(issubclass(w[-1].category, ImportWarning)) + self.assertIsSubclass(w[-1].category, ImportWarning) def test_path_importer_cache_empty_string(self): # The empty string should create a finder using the cwd. @@ -153,6 +154,28 @@ def test_deleted_cwd(self): # Do not want FileNotFoundError raised. self.assertIsNone(self.machinery.PathFinder.find_spec('whatever')) + @os_helper.skip_unless_working_chmod + def test_permission_error_cwd(self): + # gh-115911: Test that an unreadable CWD does not break imports, in + # particular during early stages of interpreter startup. 
+ with ( + os_helper.temp_dir() as new_dir, + os_helper.save_mode(new_dir), + os_helper.change_cwd(new_dir), + util.import_state(path=['']), + ): + # chmod() is done here (inside the 'with' block) because the order + # of teardown operations cannot be the reverse of setup order. See + # https://github.com/python/cpython/pull/116131#discussion_r1739649390 + try: + os.chmod(new_dir, 0o000) + except OSError: + self.skipTest("platform does not allow " + "changing mode of the cwd") + + # Do not want PermissionError raised. + self.assertIsNone(self.machinery.PathFinder.find_spec('whatever')) + def test_invalidate_caches_finders(self): # Finders with an invalidate_caches() method have it called. class FakeFinder: diff --git a/Lib/test/test_importlib/import_/test_relative_imports.py b/Lib/test/test_importlib/import_/test_relative_imports.py index 99c24f1fd9487c..e535d119763148 100644 --- a/Lib/test/test_importlib/import_/test_relative_imports.py +++ b/Lib/test/test_importlib/import_/test_relative_imports.py @@ -81,7 +81,7 @@ def callback(global_): self.__import__('pkg') # For __import__(). module = self.__import__('', global_, fromlist=['mod2'], level=1) self.assertEqual(module.__name__, 'pkg') - self.assertTrue(hasattr(module, 'mod2')) + self.assertHasAttr(module, 'mod2') self.assertEqual(module.mod2.attr, 'pkg.mod2') self.relative_import_test(create, globals_, callback) @@ -107,7 +107,7 @@ def callback(global_): module = self.__import__('', global_, fromlist=['module'], level=1) self.assertEqual(module.__name__, 'pkg') - self.assertTrue(hasattr(module, 'module')) + self.assertHasAttr(module, 'module') self.assertEqual(module.module.attr, 'pkg.module') self.relative_import_test(create, globals_, callback) @@ -131,7 +131,7 @@ def callback(global_): module = self.__import__('', global_, fromlist=['subpkg2'], level=2) self.assertEqual(module.__name__, 'pkg') - self.assertTrue(hasattr(module, 'subpkg2')) + self.assertHasAttr(module, 'subpkg2') self.assertEqual(module.subpkg2.attr, 'pkg.subpkg2.__init__') self.relative_import_test(create, globals_, callback) diff --git a/Lib/test/test_importlib/resources/_path.py b/Lib/test/test_importlib/resources/_path.py index 1f97c96146960d..b144628cb73c77 100644 --- a/Lib/test/test_importlib/resources/_path.py +++ b/Lib/test/test_importlib/resources/_path.py @@ -2,15 +2,44 @@ import functools from typing import Dict, Union +from typing import runtime_checkable +from typing import Protocol #### -# from jaraco.path 3.4.1 +# from jaraco.path 3.7.1 -FilesSpec = Dict[str, Union[str, bytes, 'FilesSpec']] # type: ignore +class Symlink(str): + """ + A string indicating the target of a symlink. + """ + + +FilesSpec = Dict[str, Union[str, bytes, Symlink, 'FilesSpec']] + + +@runtime_checkable +class TreeMaker(Protocol): + def __truediv__(self, *args, **kwargs): ... # pragma: no cover + + def mkdir(self, **kwargs): ... # pragma: no cover + + def write_text(self, content, **kwargs): ... # pragma: no cover + + def write_bytes(self, content): ... # pragma: no cover -def build(spec: FilesSpec, prefix=pathlib.Path()): + def symlink_to(self, target): ... # pragma: no cover + + +def _ensure_tree_maker(obj: Union[str, TreeMaker]) -> TreeMaker: + return obj if isinstance(obj, TreeMaker) else pathlib.Path(obj) # type: ignore[return-value] + + +def build( + spec: FilesSpec, + prefix: Union[str, TreeMaker] = pathlib.Path(), # type: ignore[assignment] +): """ Build a set of files/directories, as described by the spec. 
@@ -25,21 +54,25 @@ def build(spec: FilesSpec, prefix=pathlib.Path()): ... "__init__.py": "", ... }, ... "baz.py": "# Some code", - ... } + ... "bar.py": Symlink("baz.py"), + ... }, + ... "bing": Symlink("foo"), ... } >>> target = getfixture('tmp_path') >>> build(spec, target) >>> target.joinpath('foo/baz.py').read_text(encoding='utf-8') '# Some code' + >>> target.joinpath('bing/bar.py').read_text(encoding='utf-8') + '# Some code' """ for name, contents in spec.items(): - create(contents, pathlib.Path(prefix) / name) + create(contents, _ensure_tree_maker(prefix) / name) @functools.singledispatch def create(content: Union[str, bytes, FilesSpec], path): path.mkdir(exist_ok=True) - build(content, prefix=path) # type: ignore + build(content, prefix=path) # type: ignore[arg-type] @create.register @@ -52,5 +85,10 @@ def _(content: str, path): path.write_text(content, encoding='utf-8') +@create.register +def _(content: Symlink, path): + path.symlink_to(content) + + # end from jaraco.path #### diff --git a/Lib/test/test_importlib/resources/test_files.py b/Lib/test/test_importlib/resources/test_files.py index 933894dce2c045..db8a4e62a32dc6 100644 --- a/Lib/test/test_importlib/resources/test_files.py +++ b/Lib/test/test_importlib/resources/test_files.py @@ -60,6 +60,26 @@ class OpenZipTests(FilesTests, util.ZipSetup, unittest.TestCase): class OpenNamespaceTests(FilesTests, util.DiskSetup, unittest.TestCase): MODULE = 'namespacedata01' + def test_non_paths_in_dunder_path(self): + """ + Non-path items in a namespace package's ``__path__`` are ignored. + + As reported in python/importlib_resources#311, some tools + like Setuptools, when creating editable packages, will inject + non-paths into a namespace package's ``__path__``, a + sentinel like + ``__editable__.sample_namespace-1.0.finder.__path_hook__`` + to cause the ``PathEntryFinder`` to be called when searching + for packages. In that case, resources should still be loadable. + """ + import namespacedata01 + + namespacedata01.__path__.append( + '__editable__.sample_namespace-1.0.finder.__path_hook__' + ) + + resources.files(namespacedata01) + class OpenNamespaceZipTests(FilesTests, util.ZipSetup, unittest.TestCase): ZIP_MODULE = 'namespacedata01' @@ -86,7 +106,7 @@ def test_module_resources(self): """ A module can have resources found adjacent to the module. """ - import mod + import mod # type: ignore[import-not-found] actual = resources.files(mod).joinpath('res.txt').read_text(encoding='utf-8') assert actual == self.spec['res.txt'] diff --git a/Lib/test/test_importlib/resources/test_functional.py b/Lib/test/test_importlib/resources/test_functional.py index 4317abf3162c52..e8d25fa4d9faf0 100644 --- a/Lib/test/test_importlib/resources/test_functional.py +++ b/Lib/test/test_importlib/resources/test_functional.py @@ -43,12 +43,6 @@ def _gen_resourcetxt_path_parts(self): with self.subTest(path_parts=path_parts): yield path_parts - def assertEndsWith(self, string, suffix): - """Assert that `string` ends with `suffix`. 
- - Used to ignore an architecture-specific UTF-16 byte-order mark.""" - self.assertEqual(string[-len(suffix) :], suffix) - def test_read_text(self): self.assertEqual( resources.read_text(self.anchor01, 'utf-8.file'), diff --git a/Lib/test/test_importlib/resources/test_path.py b/Lib/test/test_importlib/resources/test_path.py index 378dc7a2baeb23..903911f57b3306 100644 --- a/Lib/test/test_importlib/resources/test_path.py +++ b/Lib/test/test_importlib/resources/test_path.py @@ -20,7 +20,7 @@ def test_reading(self): target = resources.files(self.data) / 'utf-8.file' with resources.as_file(target) as path: self.assertIsInstance(path, pathlib.Path) - self.assertTrue(path.name.endswith("utf-8.file"), repr(path)) + self.assertEndsWith(path.name, "utf-8.file") self.assertEqual('Hello, UTF-8 world!\n', path.read_text(encoding='utf-8')) diff --git a/Lib/test/test_importlib/source/test_finder.py b/Lib/test/test_importlib/source/test_finder.py index 8c06c4da1f5cba..4de736a6bf3b2d 100644 --- a/Lib/test/test_importlib/source/test_finder.py +++ b/Lib/test/test_importlib/source/test_finder.py @@ -73,7 +73,7 @@ def run_test(self, test, create=None, *, compile_=None, unlink=None): if error.errno != errno.ENOENT: raise loader = self.import_(mapping['.root'], test) - self.assertTrue(hasattr(loader, 'load_module')) + self.assertHasAttr(loader, 'load_module') return loader def test_module(self): @@ -100,7 +100,7 @@ def test_module_in_package(self): with util.create_modules('pkg.__init__', 'pkg.sub') as mapping: pkg_dir = os.path.dirname(mapping['pkg.__init__']) loader = self.import_(pkg_dir, 'pkg.sub') - self.assertTrue(hasattr(loader, 'load_module')) + self.assertHasAttr(loader, 'load_module') # [sub package] def test_package_in_package(self): @@ -108,7 +108,7 @@ def test_package_in_package(self): with context as mapping: pkg_dir = os.path.dirname(mapping['pkg.__init__']) loader = self.import_(pkg_dir, 'pkg.sub') - self.assertTrue(hasattr(loader, 'load_module')) + self.assertHasAttr(loader, 'load_module') # [package over modules] def test_package_over_module(self): @@ -129,7 +129,7 @@ def test_empty_string_for_dir(self): file.write("# test file for importlib") try: loader = self._find(finder, 'mod', loader_only=True) - self.assertTrue(hasattr(loader, 'load_module')) + self.assertHasAttr(loader, 'load_module') finally: os.unlink('mod.py') diff --git a/Lib/test/test_importlib/source/test_path_hook.py b/Lib/test/test_importlib/source/test_path_hook.py index f274330e0b333b..6e1c23e6a9842b 100644 --- a/Lib/test/test_importlib/source/test_path_hook.py +++ b/Lib/test/test_importlib/source/test_path_hook.py @@ -15,12 +15,12 @@ def path_hook(self): def test_success(self): with util.create_modules('dummy') as mapping: - self.assertTrue(hasattr(self.path_hook()(mapping['.root']), - 'find_spec')) + self.assertHasAttr(self.path_hook()(mapping['.root']), + 'find_spec') def test_empty_string(self): # The empty string represents the cwd. - self.assertTrue(hasattr(self.path_hook()(''), 'find_spec')) + self.assertHasAttr(self.path_hook()(''), 'find_spec') (Frozen_PathHookTest, diff --git a/Lib/test/test_importlib/test_abc.py b/Lib/test/test_importlib/test_abc.py index 603125f6d926f6..b1ab52f966ffdb 100644 --- a/Lib/test/test_importlib/test_abc.py +++ b/Lib/test/test_importlib/test_abc.py @@ -43,14 +43,12 @@ def setUp(self): def test_subclasses(self): # Test that the expected subclasses inherit. 
for subclass in self.subclasses: - self.assertTrue(issubclass(subclass, self.__test), - "{0} is not a subclass of {1}".format(subclass, self.__test)) + self.assertIsSubclass(subclass, self.__test) def test_superclasses(self): # Test that the class inherits from the expected superclasses. for superclass in self.superclasses: - self.assertTrue(issubclass(self.__test, superclass), - "{0} is not a superclass of {1}".format(superclass, self.__test)) + self.assertIsSubclass(self.__test, superclass) class MetaPathFinder(InheritanceTests): @@ -226,7 +224,15 @@ class ResourceLoaderDefaultsTests(ABCTestHarness): SPLIT = make_abc_subclasses(ResourceLoader) def test_get_data(self): - with self.assertRaises(IOError): + with ( + self.assertRaises(IOError), + self.assertWarnsRegex( + DeprecationWarning, + r"importlib\.abc\.ResourceLoader is deprecated in favour of " + r"supporting resource loading through importlib\.resources" + r"\.abc\.TraversableResources.", + ), + ): self.ins.get_data('/some/path') @@ -416,14 +422,14 @@ def test_source_to_code_source(self): # Since compile() can handle strings, so should source_to_code(). source = 'attr = 42' module = self.source_to_module(source) - self.assertTrue(hasattr(module, 'attr')) + self.assertHasAttr(module, 'attr') self.assertEqual(module.attr, 42) def test_source_to_code_bytes(self): # Since compile() can handle bytes, so should source_to_code(). source = b'attr = 42' module = self.source_to_module(source) - self.assertTrue(hasattr(module, 'attr')) + self.assertHasAttr(module, 'attr') self.assertEqual(module.attr, 42) def test_source_to_code_path(self): @@ -757,7 +763,7 @@ def test_package_settings(self): warnings.simplefilter('ignore', DeprecationWarning) module = self.loader.load_module(self.name) self.verify_module(module) - self.assertFalse(hasattr(module, '__path__')) + self.assertNotHasAttr(module, '__path__') def test_get_source_encoding(self): # Source is considered encoded in UTF-8 by default unless otherwise @@ -913,5 +919,47 @@ def test_universal_newlines(self): SourceOnlyLoaderMock=SPLIT_SOL) +class SourceLoaderDeprecationWarningsTests(unittest.TestCase): + """Tests SourceLoader deprecation warnings.""" + + def test_deprecated_path_mtime(self): + from importlib.abc import SourceLoader + class DummySourceLoader(SourceLoader): + def get_data(self, path): + return b'' + + def get_filename(self, fullname): + return 'foo.py' + + def path_stats(self, path): + return {'mtime': 1} + with self.assertWarnsRegex( + DeprecationWarning, + r"importlib\.abc\.ResourceLoader is deprecated in favour of " + r"supporting resource loading through importlib\.resources" + r"\.abc\.TraversableResources.", + ): + loader = DummySourceLoader() + + with self.assertWarnsRegex( + DeprecationWarning, + r"SourceLoader\.path_mtime is deprecated in favour of " + r"SourceLoader\.path_stats\(\)\." 
+ ): + loader.path_mtime('foo.py') + + +class ResourceLoaderDeprecationWarningsTests(unittest.TestCase): + """Tests ResourceLoader deprecation warnings.""" + + def test_deprecated_resource_loader(self): + from importlib.abc import ResourceLoader + class DummyLoader(ResourceLoader): + def get_data(self, path): + return b'' + + with self.assertWarns(DeprecationWarning): + DummyLoader() + if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_importlib/test_api.py b/Lib/test/test_importlib/test_api.py index 51ea5270b1a928..1bc531a2fe34e7 100644 --- a/Lib/test/test_importlib/test_api.py +++ b/Lib/test/test_importlib/test_api.py @@ -430,8 +430,7 @@ def test_everyone_has___loader__(self): for name, module in sys.modules.items(): if isinstance(module, types.ModuleType): with self.subTest(name=name): - self.assertTrue(hasattr(module, '__loader__'), - '{!r} lacks a __loader__ attribute'.format(name)) + self.assertHasAttr(module, '__loader__') if self.machinery.BuiltinImporter.find_spec(name): self.assertIsNot(module.__loader__, None) elif self.machinery.FrozenImporter.find_spec(name): @@ -441,7 +440,7 @@ def test_everyone_has___spec__(self): for name, module in sys.modules.items(): if isinstance(module, types.ModuleType): with self.subTest(name=name): - self.assertTrue(hasattr(module, '__spec__')) + self.assertHasAttr(module, '__spec__') if self.machinery.BuiltinImporter.find_spec(name): self.assertIsNot(module.__spec__, None) elif self.machinery.FrozenImporter.find_spec(name): @@ -492,5 +491,18 @@ def test_util(self): support.check__all__(self, util['Source'], extra=extra) +class TestDeprecations(unittest.TestCase): + def test_machinery_deprecated_attributes(self): + from importlib import machinery + attributes = ( + 'DEBUG_BYTECODE_SUFFIXES', + 'OPTIMIZED_BYTECODE_SUFFIXES', + ) + for attr in attributes: + with self.subTest(attr=attr): + with self.assertWarns(DeprecationWarning): + getattr(machinery, attr) + + if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_importlib/test_lazy.py b/Lib/test/test_importlib/test_lazy.py index 5c6e0303528906..e48fad8898f0ef 100644 --- a/Lib/test/test_importlib/test_lazy.py +++ b/Lib/test/test_importlib/test_lazy.py @@ -125,12 +125,12 @@ def test_delete_eventual_attr(self): # Deleting an attribute should stay deleted. 
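A minimal sketch of what the deprecation tests above assert (assuming the DeprecationWarning added in this series): merely instantiating an importlib.abc.ResourceLoader subclass warns and points users at importlib.resources.abc.TraversableResources.

    import warnings
    from importlib.abc import ResourceLoader

    class DummyLoader(ResourceLoader):
        def get_data(self, path):
            return b''

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter('always')
        DummyLoader()                      # instantiation triggers the warning
    print([str(w.message) for w in caught])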
module = self.new_module() del module.attr - self.assertFalse(hasattr(module, 'attr')) + self.assertNotHasAttr(module, 'attr') def test_delete_preexisting_attr(self): module = self.new_module() del module.__name__ - self.assertFalse(hasattr(module, '__name__')) + self.assertNotHasAttr(module, '__name__') def test_module_substitution_error(self): with test_util.uncache(TestingImporter.module_name): diff --git a/Lib/test/test_importlib/test_namespace_pkgs.py b/Lib/test/test_importlib/test_namespace_pkgs.py index cbbdada3b010a7..6ca0978f9bca69 100644 --- a/Lib/test/test_importlib/test_namespace_pkgs.py +++ b/Lib/test/test_importlib/test_namespace_pkgs.py @@ -80,7 +80,7 @@ def test_cant_import_other(self): def test_simple_repr(self): import foo.one - self.assertTrue(repr(foo).startswith("sp\xc3\xa4m\x00') + @patch('socket.socket') + def test_tcp_timeout(self, mock_socket): + instance_mock_sock = mock_socket.return_value + instance_mock_sock.connect.side_effect = socket.timeout + + with self.assertRaises(socket.timeout): + logging.handlers.SysLogHandler(address=('localhost', 514), + socktype=socket.SOCK_STREAM, + timeout=1) + + instance_mock_sock.close.assert_called() + @unittest.skipUnless(hasattr(socket, "AF_UNIX"), "Unix sockets required") class UnixSysLogHandlerTest(SysLogHandlerTest): @@ -3524,7 +3537,7 @@ def test_config14_ok(self): self.assertEqual(h.foo, 'bar') self.assertEqual(h.terminator, '!\n') logging.warning('Exclamation') - self.assertTrue(output.getvalue().endswith('Exclamation!\n')) + self.assertEndsWith(output.getvalue(), 'Exclamation!\n') def test_config15_ok(self): @@ -4281,7 +4294,7 @@ def test_queue_handler(self): msg = self.next_message() self.que_logger.warning(msg) data = self.queue.get_nowait() - self.assertTrue(isinstance(data, logging.LogRecord)) + self.assertIsInstance(data, logging.LogRecord) self.assertEqual(data.name, self.que_logger.name) self.assertEqual((data.msg, data.args), (msg, None)) @@ -4879,14 +4892,14 @@ def test_formatting(self): r.removeHandler(h) h.close() r = h.records[0] - self.assertTrue(r.exc_text.startswith('Traceback (most recent ' - 'call last):\n')) - self.assertTrue(r.exc_text.endswith('\nRuntimeError: ' - 'deliberate mistake')) - self.assertTrue(r.stack_info.startswith('Stack (most recent ' - 'call last):\n')) - self.assertTrue(r.stack_info.endswith('logging.exception(\'failed\', ' - 'stack_info=True)')) + self.assertStartsWith(r.exc_text, + 'Traceback (most recent call last):\n') + self.assertEndsWith(r.exc_text, + '\nRuntimeError: deliberate mistake') + self.assertStartsWith(r.stack_info, + 'Stack (most recent call last):\n') + self.assertEndsWith(r.stack_info, + "logging.exception('failed', stack_info=True)") class LastResortTest(BaseTest): @@ -5229,8 +5242,8 @@ class LogRecordTest(BaseTest): def test_str_rep(self): r = logging.makeLogRecord({}) s = str(r) - self.assertTrue(s.startswith('')) + self.assertStartsWith(s, '') def test_dict_arg(self): h = RecordingHandler() @@ -5880,14 +5893,14 @@ def test_extra_in_records(self): self.adapter.critical('foo should be here') self.assertEqual(len(self.recording.records), 1) record = self.recording.records[0] - self.assertTrue(hasattr(record, 'foo')) + self.assertHasAttr(record, 'foo') self.assertEqual(record.foo, '1') def test_extra_not_merged_by_default(self): self.adapter.critical('foo should NOT be here', extra={'foo': 'nope'}) self.assertEqual(len(self.recording.records), 1) record = self.recording.records[0] - self.assertFalse(hasattr(record, 'foo')) + self.assertNotHasAttr(record, 'foo') 
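Many of the mechanical changes in the hunks above and below swap assertTrue()/assertFalse() idioms for dedicated TestCase helpers. A small self-contained sketch of the equivalences, assuming the helpers available in this branch (the older assertTrue forms are shown in the comments):

    import unittest

    class HelperEquivalents(unittest.TestCase):
        def test_helpers(self):
            self.assertHasAttr(unittest, 'TestCase')        # assertTrue(hasattr(unittest, 'TestCase'))
            self.assertNotHasAttr(unittest, 'no_such')      # assertFalse(hasattr(unittest, 'no_such'))
            self.assertStartsWith('spam and eggs', 'spam')  # assertTrue(s.startswith('spam'))
            self.assertEndsWith('spam and eggs', 'eggs')    # assertTrue(s.endswith('eggs'))
            self.assertIsSubclass(bool, int)                # assertTrue(issubclass(bool, int))

    if __name__ == '__main__':
        unittest.main()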
def test_extra_merged(self): self.adapter = logging.LoggerAdapter(logger=self.logger, @@ -5897,8 +5910,8 @@ def test_extra_merged(self): self.adapter.critical('foo and bar should be here', extra={'bar': '2'}) self.assertEqual(len(self.recording.records), 1) record = self.recording.records[0] - self.assertTrue(hasattr(record, 'foo')) - self.assertTrue(hasattr(record, 'bar')) + self.assertHasAttr(record, 'foo') + self.assertHasAttr(record, 'bar') self.assertEqual(record.foo, '1') self.assertEqual(record.bar, '2') @@ -5910,7 +5923,7 @@ def test_extra_merged_log_call_has_precedence(self): self.adapter.critical('foo shall be min', extra={'foo': '2'}) self.assertEqual(len(self.recording.records), 1) record = self.recording.records[0] - self.assertTrue(hasattr(record, 'foo')) + self.assertHasAttr(record, 'foo') self.assertEqual(record.foo, '2') @@ -6624,18 +6637,19 @@ def namer(filename): p = '%s.log.' % prefix for c in candidates: d, fn = os.path.split(c) - self.assertTrue(fn.startswith(p)) + self.assertStartsWith(fn, p) elif prefix.startswith('d.e'): for c in candidates: d, fn = os.path.split(c) - self.assertTrue(fn.endswith('.log'), fn) - self.assertTrue(fn.startswith(prefix + '.') and - fn[len(prefix) + 2].isdigit()) + self.assertEndsWith(fn, '.log') + self.assertStartsWith(fn, prefix + '.') + self.assertTrue(fn[len(prefix) + 2].isdigit()) elif prefix == 'g': for c in candidates: d, fn = os.path.split(c) - self.assertTrue(fn.endswith('.oldlog')) - self.assertTrue(fn.startswith('g') and fn[1].isdigit()) + self.assertEndsWith(fn, '.oldlog') + self.assertStartsWith(fn, 'g') + self.assertTrue(fn[1].isdigit()) def test_compute_files_to_delete_same_filename_different_extensions(self): # See GH-93205 for background @@ -6673,7 +6687,7 @@ def test_compute_files_to_delete_same_filename_different_extensions(self): matcher = re.compile(r"^\d{4}-\d{2}-\d{2}_\d{2}-\d{2}-\d{2}\Z") for c in candidates: d, fn = os.path.split(c) - self.assertTrue(fn.startswith(prefix+'.')) + self.assertStartsWith(fn, prefix+'.') suffix = fn[(len(prefix)+1):] self.assertRegex(suffix, matcher) diff --git a/Lib/test/test_long.py b/Lib/test/test_long.py index 19978118c80dba..f336d49fa4f008 100644 --- a/Lib/test/test_long.py +++ b/Lib/test/test_long.py @@ -1470,7 +1470,6 @@ def equivalent_python(byte_array, byteorder, signed=False): b'\x00': 0, b'\x00\x00': 0, b'\x01': 1, - b'\x00\x01': 256, b'\xff': -1, b'\xff\xff': -1, b'\x81': -127, diff --git a/Lib/test/test_math.py b/Lib/test/test_math.py index 6976a5d85da019..2c57d288bc03ff 100644 --- a/Lib/test/test_math.py +++ b/Lib/test/test_math.py @@ -2503,6 +2503,46 @@ def test_input_exceptions(self): self.assertRaises(TypeError, math.atan2, 1.0) self.assertRaises(TypeError, math.atan2, 1.0, 2.0, 3.0) + def test_exception_messages(self): + x = -1.1 + with self.assertRaisesRegex(ValueError, + f"expected a nonnegative input, got {x}"): + math.sqrt(x) + with self.assertRaisesRegex(ValueError, + f"expected a positive input, got {x}"): + math.log(x) + with self.assertRaisesRegex(ValueError, + f"expected a positive input, got {x}"): + math.log(123, x) + with self.assertRaisesRegex(ValueError, + f"expected a positive input, got {x}"): + math.log(x, 123) + with self.assertRaisesRegex(ValueError, + f"expected a positive input, got {x}"): + math.log2(x) + with self.assertRaisesRegex(ValueError, + f"expected a positive input, got {x}"): + math.log10(x) + x = decimal.Decimal('-1.1') + with self.assertRaisesRegex(ValueError, + f"expected a positive input, got {x}"): + math.log(x) + x = 
fractions.Fraction(1, 10**400) + with self.assertRaisesRegex(ValueError, + f"expected a positive input, got {float(x)}"): + math.log(x) + x = -123 + with self.assertRaisesRegex(ValueError, + f"expected a positive input, got {x}"): + math.log(x) + with self.assertRaisesRegex(ValueError, + f"expected a float or nonnegative integer, got {x}"): + math.gamma(x) + x = 1.0 + with self.assertRaisesRegex(ValueError, + f"expected a number between -1 and 1, got {x}"): + math.atanh(x) + # Custom assertions. def assertIsNaN(self, value): diff --git a/Lib/test/test_metaclass.py b/Lib/test/test_metaclass.py index b37b7defe84d1c..07a333f98fa0a9 100644 --- a/Lib/test/test_metaclass.py +++ b/Lib/test/test_metaclass.py @@ -254,6 +254,33 @@ [...] test.test_metaclass.ObscureException +Test setting attributes with a non-base type in mro() (gh-127773). + + >>> class Base: + ... value = 1 + ... + >>> class Meta(type): + ... def mro(cls): + ... return (cls, Base, object) + ... + >>> class WeirdClass(metaclass=Meta): + ... pass + ... + >>> Base.value + 1 + >>> WeirdClass.value + 1 + >>> Base.value = 2 + >>> Base.value + 2 + >>> WeirdClass.value + 2 + >>> Base.value = 3 + >>> Base.value + 3 + >>> WeirdClass.value + 3 + """ import sys diff --git a/Lib/test/test_monitoring.py b/Lib/test/test_monitoring.py index 32b3a6ac049e28..3125d190626e38 100644 --- a/Lib/test/test_monitoring.py +++ b/Lib/test/test_monitoring.py @@ -12,9 +12,9 @@ import test.support from test.support import requires_specialization_ft, script_helper -from test.support.import_helper import import_module _testcapi = test.support.import_helper.import_module("_testcapi") +_testinternalcapi = test.support.import_helper.import_module("_testinternalcapi") PAIR = (0,1) @@ -898,13 +898,13 @@ def implicit_stop_iteration(iterator=None): # re-specialize immediately, so that we can we can test the # unspecialized version of the loop first. # Note: this assumes that we don't specialize loops over sets. - implicit_stop_iteration(set(range(100))) + implicit_stop_iteration(set(range(_testinternalcapi.SPECIALIZATION_THRESHOLD))) # This will record a RAISE event for the StopIteration. self.check_events(implicit_stop_iteration, expected, recorders=recorders) # Now specialize, so that we see a STOP_ITERATION event. - for _ in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_COOLDOWN): implicit_stop_iteration() # This will record a STOP_ITERATION event for the StopIteration. 
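The replacement of hard-coded warm-up counts (100, 1025, and so on) with _testinternalcapi.SPECIALIZATION_THRESHOLD and SPECIALIZATION_COOLDOWN follows one recurring pattern. A rough sketch of that pattern, CPython-only and assuming a build where specialization is enabled and the internal _testinternalcapi module exposes the constant, as it does in this branch:

    import dis
    from test.support.import_helper import import_module

    _testinternalcapi = import_module("_testinternalcapi")

    class C:
        def __init__(self):
            self.x = 1

    def read_attr(o):
        return o.x

    o = C()
    # Call the function enough times for the adaptive interpreter to
    # specialize, instead of guessing a magic number of iterations.
    for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD):
        read_attr(o)

    # After warm-up, the adaptive bytecode should show a specialized
    # LOAD_ATTR_* variant rather than the generic LOAD_ATTR.
    print(any(instr.opname.startswith("LOAD_ATTR_")
              for instr in dis.get_instructions(read_attr, adaptive=True)))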
@@ -1058,7 +1058,7 @@ def f(): except ValueError: pass - for _ in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): f() recorders = ( ReturnRecorder, @@ -1589,11 +1589,11 @@ def whilefunc(n=0): ('branch right', 'whilefunc', 1, 3)]) self.check_events(func, recorders = BRANCH_OFFSET_RECORDERS, expected = [ - ('branch left', 'func', 28, 34), - ('branch right', 'func', 46, 60), - ('branch left', 'func', 28, 34), - ('branch left', 'func', 46, 52), - ('branch right', 'func', 28, 72)]) + ('branch left', 'func', 28, 32), + ('branch right', 'func', 44, 58), + ('branch left', 'func', 28, 32), + ('branch left', 'func', 44, 50), + ('branch right', 'func', 28, 70)]) def test_except_star(self): @@ -1649,7 +1649,7 @@ def foo(n=0): return None in_loop = ('branch left', 'foo', 10, 16) - exit_loop = ('branch right', 'foo', 10, 32) + exit_loop = ('branch right', 'foo', 10, 40) self.check_events(foo, recorders = BRANCH_OFFSET_RECORDERS, expected = [ in_loop, in_loop, @@ -1658,6 +1658,88 @@ def foo(n=0): exit_loop]) +class TestBranchConsistency(MonitoringTestBase, unittest.TestCase): + + def check_branches(self, func, tool=TEST_TOOL, recorders=BRANCH_OFFSET_RECORDERS): + try: + self.assertEqual(sys.monitoring._all_events(), {}) + event_list = [] + all_events = 0 + for recorder in recorders: + ev = recorder.event_type + sys.monitoring.register_callback(tool, ev, recorder(event_list)) + all_events |= ev + sys.monitoring.set_local_events(tool, func.__code__, all_events) + func() + sys.monitoring.set_local_events(tool, func.__code__, 0) + for recorder in recorders: + sys.monitoring.register_callback(tool, recorder.event_type, None) + lefts = set() + rights = set() + for (src, left, right) in func.__code__.co_branches(): + lefts.add((src, left)) + rights.add((src, right)) + for event in event_list: + way, _, src, dest = event + if "left" in way: + self.assertIn((src, dest), lefts) + else: + self.assertIn("right", way) + self.assertIn((src, dest), rights) + finally: + sys.monitoring.set_local_events(tool, func.__code__, 0) + for recorder in recorders: + sys.monitoring.register_callback(tool, recorder.event_type, None) + + def test_simple(self): + + def func(): + x = 1 + for a in range(2): + if a: + x = 4 + else: + x = 6 + 7 + + self.check_branches(func) + + def whilefunc(n=0): + while n < 3: + n += 1 # line 2 + 3 + + self.check_branches(whilefunc) + + def test_except_star(self): + + class Foo: + def meth(self): + pass + + def func(): + try: + try: + raise KeyError + except* Exception as e: + f = Foo(); f.meth() + except KeyError: + pass + + + self.check_branches(func) + + def test4(self): + + def foo(n=0): + while n<4: + pass + n += 1 + return None + + self.check_branches(foo) + + class TestLoadSuperAttr(CheckEvents): RECORDERS = CallRecorder, LineRecorder, CRaiseRecorder, CReturnRecorder @@ -1952,8 +2034,8 @@ def __init__(self, set_event): sys.monitoring.set_events(TEST_TOOL, E.PY_RESUME) def make_foo_optimized_then_set_event(): - for i in range(100): - Foo(i == 99) + for i in range(_testinternalcapi.SPECIALIZATION_THRESHOLD + 1): + Foo(i == _testinternalcapi.SPECIALIZATION_THRESHOLD) try: make_foo_optimized_then_set_event() @@ -2005,20 +2087,6 @@ def callback(code, instruction_offset): class TestOptimizer(MonitoringTestBase, unittest.TestCase): - def setUp(self): - _testinternalcapi = import_module("_testinternalcapi") - if hasattr(_testinternalcapi, "get_optimizer"): - self.old_opt = _testinternalcapi.get_optimizer() - opt = _testinternalcapi.new_counter_optimizer() - 
_testinternalcapi.set_optimizer(opt) - super(TestOptimizer, self).setUp() - - def tearDown(self): - super(TestOptimizer, self).tearDown() - import _testinternalcapi - if hasattr(_testinternalcapi, "get_optimizer"): - _testinternalcapi.set_optimizer(self.old_opt) - def test_for_loop(self): def test_func(x): i = 0 @@ -2039,9 +2107,9 @@ def test_func(recorder): set_events = sys.monitoring.set_events line = E.LINE i = 0 - for i in range(551): - # Turn on events without branching once i reaches 500. - set_events(TEST_TOOL, line * int(i >= 500)) + for i in range(_testinternalcapi.SPECIALIZATION_THRESHOLD + 51): + # Turn on events without branching once i reaches _testinternalcapi.SPECIALIZATION_THRESHOLD. + set_events(TEST_TOOL, line * int(i >= _testinternalcapi.SPECIALIZATION_THRESHOLD)) pass pass pass diff --git a/Lib/test/test_ntpath.py b/Lib/test/test_ntpath.py index 6715071af8c752..da01c65a1c2954 100644 --- a/Lib/test/test_ntpath.py +++ b/Lib/test/test_ntpath.py @@ -940,7 +940,7 @@ def check_error(paths, expected): self.assertRaises(TypeError, ntpath.commonpath, ['C:\\Foo', b'Foo\\Baz']) self.assertRaises(TypeError, ntpath.commonpath, ['Foo', b'C:\\Foo\\Baz']) - @unittest.skipIf(is_emscripten, "Emscripten cannot fstat unnamed files.") + @unittest.skipIf(is_emscripten, "Fixed in next Emscripten release after 4.0.1") def test_sameopenfile(self): with TemporaryFile() as tf1, TemporaryFile() as tf2: # Make sure the same file is really the same diff --git a/Lib/test/test_opcache.py b/Lib/test/test_opcache.py index 79f452f8068c7f..87de4c94ba26fb 100644 --- a/Lib/test/test_opcache.py +++ b/Lib/test/test_opcache.py @@ -6,7 +6,7 @@ import unittest from test.support import (threading_helper, check_impl_detail, requires_specialization, requires_specialization_ft, - cpython_only) + cpython_only, requires_jit_disabled, reset_code) from test.support.import_helper import import_module # Skip this module on other interpreters, it is cpython specific: @@ -16,20 +16,6 @@ _testinternalcapi = import_module("_testinternalcapi") -def disabling_optimizer(func): - def wrapper(*args, **kwargs): - if not hasattr(_testinternalcapi, "get_optimizer"): - return func(*args, **kwargs) - old_opt = _testinternalcapi.get_optimizer() - _testinternalcapi.set_optimizer(None) - try: - return func(*args, **kwargs) - finally: - _testinternalcapi.set_optimizer(old_opt) - - return wrapper - - class TestBase(unittest.TestCase): def assert_specialized(self, f, opname): instructions = dis.get_instructions(f, adaptive=True) @@ -59,7 +45,8 @@ def f(self): d = D() - self.assertEqual(d.f(), 1) # warmup + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD - 1): + self.assertEqual(d.f(), 1) # warmup calls.clear() self.assertEqual(d.f(), 1) # try to specialize self.assertEqual(calls, [(d, D)]) @@ -79,7 +66,7 @@ def f(o): return o.x o = C() - for i in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): assert f(o) == 1 Descriptor.__get__ = lambda self, instance, value: 2 @@ -106,13 +93,13 @@ def __set__(self, instance, value): def f(): return Class.attribute - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): self.assertTrue(f()) Descriptor.__get__ = __get__ Descriptor.__set__ = __set__ - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_COOLDOWN): self.assertFalse(f()) def test_metaclass_descriptor_shadows_class_attribute(self): @@ -127,7 +114,7 @@ class Class(metaclass=Metaclass): def f(): return Class.attribute - for _ in range(1025): + for _ in 
range(_testinternalcapi.SPECIALIZATION_THRESHOLD): self.assertTrue(f()) def test_metaclass_set_descriptor_after_optimization(self): @@ -144,12 +131,12 @@ def attribute(self): def f(): return Class.attribute - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): self.assertTrue(f()) Metaclass.attribute = attribute - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_COOLDOWN): self.assertFalse(f()) def test_metaclass_del_descriptor_after_optimization(self): @@ -164,12 +151,12 @@ class Class(metaclass=Metaclass): def f(): return Class.attribute - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): self.assertTrue(f()) del Metaclass.attribute - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_COOLDOWN): self.assertFalse(f()) def test_type_descriptor_shadows_attribute_method(self): @@ -179,7 +166,7 @@ class Class: def f(): return Class.mro - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): self.assertIsNone(f()) def test_type_descriptor_shadows_attribute_member(self): @@ -189,7 +176,7 @@ class Class: def f(): return Class.__base__ - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): self.assertIs(f(), object) def test_type_descriptor_shadows_attribute_getset(self): @@ -199,7 +186,7 @@ class Class: def f(): return Class.__name__ - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): self.assertEqual(f(), "Class") def test_metaclass_getattribute(self): @@ -213,7 +200,7 @@ class Class(metaclass=Metaclass): def f(): return Class.attribute - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): self.assertTrue(f()) def test_metaclass_swap(self): @@ -233,12 +220,12 @@ class Class(metaclass=OldMetaclass): def f(): return Class.attribute - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): self.assertTrue(f()) Class.__class__ = NewMetaclass - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_COOLDOWN): self.assertFalse(f()) def test_load_shadowing_slot_should_raise_type_error(self): @@ -255,7 +242,7 @@ def f(o): o = Sneaky() o.shadowed = 42 - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): with self.assertRaises(TypeError): f(o) @@ -272,7 +259,7 @@ def f(o): o = Sneaky() - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): with self.assertRaises(TypeError): f(o) @@ -288,7 +275,7 @@ def f(o): o = Sneaky() - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): with self.assertRaises(TypeError): f(o) @@ -304,7 +291,7 @@ def f(o): o = Sneaky() - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): with self.assertRaises(TypeError): f(o) @@ -332,13 +319,13 @@ def attribute(): def f(): return instance.attribute() - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): self.assertTrue(f()) Descriptor.__get__ = __get__ Descriptor.__set__ = __set__ - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_COOLDOWN): self.assertFalse(f()) def test_metaclass_descriptor_added_after_optimization(self): @@ -361,13 +348,13 @@ def __set__(self, instance, value): def f(): return Class.attribute() - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): self.assertTrue(f()) Descriptor.__get__ = __get__ Descriptor.__set__ = __set__ 
- for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_COOLDOWN): self.assertFalse(f()) def test_metaclass_descriptor_shadows_class_attribute(self): @@ -383,7 +370,7 @@ def attribute(): def f(): return Class.attribute() - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): self.assertTrue(f()) def test_metaclass_set_descriptor_after_optimization(self): @@ -401,12 +388,12 @@ def attribute(self): def f(): return Class.attribute() - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): self.assertTrue(f()) Metaclass.attribute = attribute - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_COOLDOWN): self.assertFalse(f()) def test_metaclass_del_descriptor_after_optimization(self): @@ -422,12 +409,12 @@ def attribute(): def f(): return Class.attribute() - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): self.assertTrue(f()) del Metaclass.attribute - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_COOLDOWN): self.assertFalse(f()) def test_type_descriptor_shadows_attribute_method(self): @@ -438,7 +425,7 @@ def mro(): def f(): return Class.mro() - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): self.assertEqual(f(), ["Spam", "eggs"]) def test_type_descriptor_shadows_attribute_member(self): @@ -449,7 +436,7 @@ def __base__(): def f(): return Class.__base__() - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): self.assertNotEqual(f(), "Spam") def test_metaclass_getattribute(self): @@ -464,7 +451,7 @@ def attribute(): def f(): return Class.attribute() - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): self.assertTrue(f()) def test_metaclass_swap(self): @@ -484,12 +471,12 @@ class Class(metaclass=OldMetaclass): def f(): return Class.attribute() - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): self.assertTrue(f()) Class.__class__ = NewMetaclass - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_COOLDOWN): self.assertFalse(f()) @@ -504,7 +491,7 @@ def f(): pass f.__defaults__ = (None,) - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): f() def test_too_many_defaults_1(self): @@ -512,7 +499,7 @@ def f(x): pass f.__defaults__ = (None, None) - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): f(None) f() @@ -521,12 +508,12 @@ def f(x, y): pass f.__defaults__ = (None, None, None) - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): f(None, None) f(None) f() - @disabling_optimizer + @requires_jit_disabled @requires_specialization_ft def test_assign_init_code(self): class MyClass: @@ -537,7 +524,7 @@ def instantiate(): return MyClass() # Trigger specialization - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): instantiate() self.assert_specialized(instantiate, "CALL_ALLOC_AND_ENTER_INIT") @@ -549,13 +536,13 @@ def count_args(self, *args): MyClass.__init__.__code__ = count_args.__code__ instantiate() - @disabling_optimizer + @requires_jit_disabled @requires_specialization_ft def test_push_init_frame_fails(self): def instantiate(): return InitTakesArg() - for _ in range(2): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): with self.assertRaises(TypeError): instantiate() self.assert_specialized(instantiate, "CALL_ALLOC_AND_ENTER_INIT") @@ 
-564,16 +551,25 @@ def instantiate(): instantiate() +def make_deferred_ref_count_obj(): + """Create an object that uses deferred reference counting. + + Only objects that use deferred refence counting may be stored in inline + caches in free-threaded builds. This constructs a new class named Foo, + which uses deferred reference counting. + """ + return type("Foo", (object,), {}) + + @threading_helper.requires_working_threading() class TestRacesDoNotCrash(TestBase): # Careful with these. Bigger numbers have a higher chance of catching bugs, # but you can also burn through a *ton* of type/dict/function versions: ITEMS = 1000 LOOPS = 4 - WARMUPS = 2 WRITERS = 2 - @disabling_optimizer + @requires_jit_disabled def assert_races_do_not_crash( self, opname, get_items, read, write, *, check_items=False ): @@ -583,11 +579,11 @@ def assert_races_do_not_crash( # Reset: if check_items: for item in items: - item.__code__ = item.__code__.replace() + reset_code(item) else: - read.__code__ = read.__code__.replace() + reset_code(read) # Specialize: - for _ in range(self.WARMUPS): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): read(items) if check_items: for item in items: @@ -714,11 +710,11 @@ def write(items): opname = "FOR_ITER_LIST" self.assert_races_do_not_crash(opname, get_items, read, write) - @requires_specialization + @requires_specialization_ft def test_load_attr_class(self): def get_items(): class C: - a = object() + a = make_deferred_ref_count_obj() items = [] for _ in range(self.ITEMS): @@ -739,12 +735,45 @@ def write(items): del item.a except AttributeError: pass - item.a = object() + item.a = make_deferred_ref_count_obj() opname = "LOAD_ATTR_CLASS" self.assert_races_do_not_crash(opname, get_items, read, write) - @requires_specialization + @requires_specialization_ft + def test_load_attr_class_with_metaclass_check(self): + def get_items(): + class Meta(type): + pass + + class C(metaclass=Meta): + a = make_deferred_ref_count_obj() + + items = [] + for _ in range(self.ITEMS): + item = C + items.append(item) + return items + + def read(items): + for item in items: + try: + item.a + except AttributeError: + pass + + def write(items): + for item in items: + try: + del item.a + except AttributeError: + pass + item.a = make_deferred_ref_count_obj() + + opname = "LOAD_ATTR_CLASS_WITH_METACLASS_CHECK" + self.assert_races_do_not_crash(opname, get_items, read, write) + + @requires_specialization_ft def test_load_attr_getattribute_overridden(self): def get_items(): class C: @@ -774,7 +803,7 @@ def write(items): opname = "LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN" self.assert_races_do_not_crash(opname, get_items, read, write) - @requires_specialization + @requires_specialization_ft def test_load_attr_instance_value(self): def get_items(): class C: @@ -798,7 +827,7 @@ def write(items): opname = "LOAD_ATTR_INSTANCE_VALUE" self.assert_races_do_not_crash(opname, get_items, read, write) - @requires_specialization + @requires_specialization_ft def test_load_attr_method_lazy_dict(self): def get_items(): class C(Exception): @@ -828,7 +857,7 @@ def write(items): opname = "LOAD_ATTR_METHOD_LAZY_DICT" self.assert_races_do_not_crash(opname, get_items, read, write) - @requires_specialization + @requires_specialization_ft def test_load_attr_method_no_dict(self): def get_items(): class C: @@ -859,7 +888,7 @@ def write(items): opname = "LOAD_ATTR_METHOD_NO_DICT" self.assert_races_do_not_crash(opname, get_items, read, write) - @requires_specialization + @requires_specialization_ft def 
test_load_attr_method_with_values(self): def get_items(): class C: @@ -914,7 +943,7 @@ def write(items): opname = "LOAD_ATTR_MODULE" self.assert_races_do_not_crash(opname, get_items, read, write) - @requires_specialization + @requires_specialization_ft def test_load_attr_property(self): def get_items(): class C: @@ -944,7 +973,34 @@ def write(items): opname = "LOAD_ATTR_PROPERTY" self.assert_races_do_not_crash(opname, get_items, read, write) - @requires_specialization + @requires_specialization_ft + def test_load_attr_slot(self): + def get_items(): + class C: + __slots__ = ["a", "b"] + + items = [] + for i in range(self.ITEMS): + item = C() + item.a = i + item.b = i + self.ITEMS + items.append(item) + return items + + def read(items): + for item in items: + item.a + item.b + + def write(items): + for item in items: + item.a = 100 + item.b = 200 + + opname = "LOAD_ATTR_SLOT" + self.assert_races_do_not_crash(opname, get_items, read, write) + + @requires_specialization_ft def test_load_attr_with_hint(self): def get_items(): class C: @@ -955,7 +1011,7 @@ class C: item = C() item.a = None # Resize into a combined unicode dict: - for i in range(29): + for i in range(_testinternalcapi.SHARED_KEYS_MAX_SIZE - 1): setattr(item, f"_{i}", None) items.append(item) return items @@ -1026,7 +1082,7 @@ class C: for _ in range(self.ITEMS): item = C() # Resize into a combined unicode dict: - for i in range(29): + for i in range(_testinternalcapi.SHARED_KEYS_MAX_SIZE - 1): setattr(item, f"_{i}", None) items.append(item) return items @@ -1122,7 +1178,7 @@ def test_dict_dematerialization(self): c.a = 1 c.b = 2 c.__dict__ - for _ in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): c.a self.assertEqual( _testinternalcapi.get_object_dict_values(c), @@ -1134,7 +1190,7 @@ def test_dict_dematerialization_multiple_refs(self): c.a = 1 c.b = 2 d = c.__dict__ - for _ in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): c.a self.assertIs(c.__dict__, d) @@ -1143,7 +1199,7 @@ def test_dict_dematerialization_copy(self): c.a = 1 c.b = 2 c2 = copy.copy(c) - for _ in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): c.a c2.a self.assertEqual( @@ -1155,7 +1211,7 @@ def test_dict_dematerialization_copy(self): (1, 2, '') ) c3 = copy.deepcopy(c) - for _ in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): c.a c3.a self.assertEqual( @@ -1169,7 +1225,7 @@ def test_dict_dematerialization_pickle(self): c.a = 1 c.b = 2 c2 = pickle.loads(pickle.dumps(c)) - for _ in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): c.a c2.a self.assertEqual( @@ -1187,7 +1243,7 @@ class D(dict): pass c.a = 1 c.b = 2 c.__dict__ = D(c.__dict__) - for _ in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): c.a self.assertIs( _testinternalcapi.get_object_dict_values(c), @@ -1232,7 +1288,7 @@ def f(o, n): for i in range(n): o.b = i # Prime f to store to dict slot 1 - f(c, 100) + f(c, _testinternalcapi.SPECIALIZATION_THRESHOLD) test_obj = NoInlineAorB() test_obj.__dict__ = make_special_dict() @@ -1249,7 +1305,7 @@ class TestSpecializer(TestBase): @requires_specialization_ft def test_binary_op(self): def binary_op_add_int(): - for _ in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): a, b = 1, 2 c = a + b self.assertEqual(c, 3) @@ -1259,7 +1315,7 @@ def binary_op_add_int(): self.assert_no_opcode(binary_op_add_int, "BINARY_OP") def binary_op_add_unicode(): - for _ in range(100): + for _ in 
range(_testinternalcapi.SPECIALIZATION_THRESHOLD): a, b = "foo", "bar" c = a + b self.assertEqual(c, "foobar") @@ -1268,6 +1324,103 @@ def binary_op_add_unicode(): self.assert_specialized(binary_op_add_unicode, "BINARY_OP_ADD_UNICODE") self.assert_no_opcode(binary_op_add_unicode, "BINARY_OP") + def binary_op_add_extend(): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): + a, b = 6, 3.0 + c = a + b + self.assertEqual(c, 9.0) + c = b + a + self.assertEqual(c, 9.0) + c = a - b + self.assertEqual(c, 3.0) + c = b - a + self.assertEqual(c, -3.0) + c = a * b + self.assertEqual(c, 18.0) + c = b * a + self.assertEqual(c, 18.0) + c = a / b + self.assertEqual(c, 2.0) + c = b / a + self.assertEqual(c, 0.5) + + binary_op_add_extend() + self.assert_specialized(binary_op_add_extend, "BINARY_OP_EXTEND") + self.assert_no_opcode(binary_op_add_extend, "BINARY_OP") + + def binary_op_zero_division(): + def compactlong_lhs(arg): + 42 / arg + def float_lhs(arg): + 42.0 / arg + + with self.assertRaises(ZeroDivisionError): + compactlong_lhs(0) + with self.assertRaises(ZeroDivisionError): + compactlong_lhs(0.0) + with self.assertRaises(ZeroDivisionError): + float_lhs(0.0) + with self.assertRaises(ZeroDivisionError): + float_lhs(0) + + self.assert_no_opcode(compactlong_lhs, "BINARY_OP_EXTEND") + self.assert_no_opcode(float_lhs, "BINARY_OP_EXTEND") + + binary_op_zero_division() + + def binary_op_nan(): + def compactlong_lhs(arg): + return ( + 42 + arg, + 42 - arg, + 42 * arg, + 42 / arg, + ) + def compactlong_rhs(arg): + return ( + arg + 42, + arg - 42, + arg * 2, + arg / 42, + ) + nan = float('nan') + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): + self.assertEqual(compactlong_lhs(1.0), (43.0, 41.0, 42.0, 42.0)) + for _ in range(_testinternalcapi.SPECIALIZATION_COOLDOWN): + self.assertTrue(all(filter(lambda x: x is nan, compactlong_lhs(nan)))) + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): + self.assertEqual(compactlong_rhs(42.0), (84.0, 0.0, 84.0, 1.0)) + for _ in range(_testinternalcapi.SPECIALIZATION_COOLDOWN): + self.assertTrue(all(filter(lambda x: x is nan, compactlong_rhs(nan)))) + + self.assert_no_opcode(compactlong_lhs, "BINARY_OP_EXTEND") + self.assert_no_opcode(compactlong_rhs, "BINARY_OP_EXTEND") + + binary_op_nan() + + def binary_op_bitwise_extend(): + for _ in range(100): + a, b = 2, 7 + x = a | b + self.assertEqual(x, 7) + y = a & b + self.assertEqual(y, 2) + z = a ^ b + self.assertEqual(z, 5) + a, b = 3, 9 + a |= b + self.assertEqual(a, 11) + a, b = 11, 9 + a &= b + self.assertEqual(a, 9) + a, b = 3, 9 + a ^= b + self.assertEqual(a, 10) + + binary_op_bitwise_extend() + self.assert_specialized(binary_op_bitwise_extend, "BINARY_OP_EXTEND") + self.assert_no_opcode(binary_op_bitwise_extend, "BINARY_OP") + @cpython_only @requires_specialization_ft def test_load_super_attr(self): @@ -1278,7 +1431,7 @@ def __init__(self): meth = super().__init__ super().__init__() - for _ in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): A() self.assert_specialized(A.__init__, "LOAD_SUPER_ATTR_ATTR") @@ -1298,7 +1451,7 @@ def init(self): globals()['super'] = fake_super try: # Should be unspecialized after enough calls. 
- for _ in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_COOLDOWN): A() finally: globals()['super'] = real_super @@ -1311,7 +1464,7 @@ def init(self): @requires_specialization_ft def test_contain_op(self): def contains_op_dict(): - for _ in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): a, b = 1, {1: 2, 2: 5} self.assertTrue(a in b) self.assertFalse(3 in b) @@ -1321,7 +1474,7 @@ def contains_op_dict(): self.assert_no_opcode(contains_op_dict, "CONTAINS_OP") def contains_op_set(): - for _ in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): a, b = 1, {1, 2} self.assertTrue(a in b) self.assertFalse(3 in b) @@ -1348,7 +1501,7 @@ async def __aexit__(self, *exc): pass async def send_with(): - for i in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): async with CM(): x = 1 @@ -1366,7 +1519,7 @@ def g(): def send_yield_from(): yield from g() - for i in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): list(send_yield_from()) self.assert_specialized(send_yield_from, "SEND_GEN") @@ -1378,19 +1531,19 @@ def test_store_attr_slot(self): class C: __slots__ = ['x'] - def set_slot(): + def set_slot(n): c = C() - for i in range(100): + for i in range(n): c.x = i - set_slot() + set_slot(_testinternalcapi.SPECIALIZATION_THRESHOLD) self.assert_specialized(set_slot, "STORE_ATTR_SLOT") self.assert_no_opcode(set_slot, "STORE_ATTR") # Adding a property for 'x' should unspecialize it. C.x = property(lambda self: None, lambda self, x: None) - set_slot() + set_slot(_testinternalcapi.SPECIALIZATION_COOLDOWN) self.assert_no_opcode(set_slot, "STORE_ATTR_SLOT") @cpython_only @@ -1399,19 +1552,20 @@ def test_store_attr_instance_value(self): class C: pass - def set_value(): + @reset_code + def set_value(n): c = C() - for i in range(100): + for i in range(n): c.x = i - set_value() + set_value(_testinternalcapi.SPECIALIZATION_THRESHOLD) self.assert_specialized(set_value, "STORE_ATTR_INSTANCE_VALUE") self.assert_no_opcode(set_value, "STORE_ATTR") # Adding a property for 'x' should unspecialize it. C.x = property(lambda self: None, lambda self, x: None) - set_value() + set_value(_testinternalcapi.SPECIALIZATION_COOLDOWN) self.assert_no_opcode(set_value, "STORE_ATTR_INSTANCE_VALUE") @cpython_only @@ -1421,21 +1575,22 @@ class C: pass c = C() - for i in range(29): + for i in range(_testinternalcapi.SHARED_KEYS_MAX_SIZE - 1): setattr(c, f"_{i}", None) - def set_value(): - for i in range(100): + @reset_code + def set_value(n): + for i in range(n): c.x = i - set_value() + set_value(_testinternalcapi.SPECIALIZATION_THRESHOLD) self.assert_specialized(set_value, "STORE_ATTR_WITH_HINT") self.assert_no_opcode(set_value, "STORE_ATTR") # Adding a property for 'x' should unspecialize it. 
C.x = property(lambda self: None, lambda self, x: None) - set_value() + set_value(_testinternalcapi.SPECIALIZATION_COOLDOWN) self.assert_no_opcode(set_value, "STORE_ATTR_WITH_HINT") @cpython_only @@ -1443,14 +1598,15 @@ def set_value(): def test_to_bool(self): def to_bool_bool(): true_cnt, false_cnt = 0, 0 - elems = [e % 2 == 0 for e in range(100)] + elems = [e % 2 == 0 for e in range(_testinternalcapi.SPECIALIZATION_THRESHOLD)] for e in elems: if e: true_cnt += 1 else: false_cnt += 1 - self.assertEqual(true_cnt, 50) - self.assertEqual(false_cnt, 50) + d, m = divmod(_testinternalcapi.SPECIALIZATION_THRESHOLD, 2) + self.assertEqual(true_cnt, d + m) + self.assertEqual(false_cnt, d) to_bool_bool() self.assert_specialized(to_bool_bool, "TO_BOOL_BOOL") @@ -1458,12 +1614,12 @@ def to_bool_bool(): def to_bool_int(): count = 0 - for i in range(100): + for i in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): if i: count += 1 else: count -= 1 - self.assertEqual(count, 98) + self.assertEqual(count, _testinternalcapi.SPECIALIZATION_THRESHOLD - 2) to_bool_int() self.assert_specialized(to_bool_int, "TO_BOOL_INT") @@ -1471,11 +1627,11 @@ def to_bool_int(): def to_bool_list(): count = 0 - elems = [1, 2, 3] + elems = list(range(_testinternalcapi.SPECIALIZATION_THRESHOLD)) while elems: count += elems.pop() self.assertEqual(elems, []) - self.assertEqual(count, 6) + self.assertEqual(count, sum(range(_testinternalcapi.SPECIALIZATION_THRESHOLD))) to_bool_list() self.assert_specialized(to_bool_list, "TO_BOOL_LIST") @@ -1483,11 +1639,11 @@ def to_bool_list(): def to_bool_none(): count = 0 - elems = [None, None, None, None] + elems = [None] * _testinternalcapi.SPECIALIZATION_THRESHOLD for e in elems: if not e: count += 1 - self.assertEqual(count, len(elems)) + self.assertEqual(count, _testinternalcapi.SPECIALIZATION_THRESHOLD) to_bool_none() self.assert_specialized(to_bool_none, "TO_BOOL_NONE") @@ -1495,11 +1651,11 @@ def to_bool_none(): def to_bool_str(): count = 0 - elems = ["", "foo", ""] + elems = [""] + ["foo"] * (_testinternalcapi.SPECIALIZATION_THRESHOLD - 1) for e in elems: if e: count += 1 - self.assertEqual(count, 1) + self.assertEqual(count, _testinternalcapi.SPECIALIZATION_THRESHOLD - 1) to_bool_str() self.assert_specialized(to_bool_str, "TO_BOOL_STR") @@ -1509,7 +1665,7 @@ def to_bool_str(): @requires_specialization_ft def test_unpack_sequence(self): def unpack_sequence_two_tuple(): - for _ in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): a, b = 1, 2 self.assertEqual(a, 1) self.assertEqual(b, 2) @@ -1520,7 +1676,7 @@ def unpack_sequence_two_tuple(): self.assert_no_opcode(unpack_sequence_two_tuple, "UNPACK_SEQUENCE") def unpack_sequence_tuple(): - for _ in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): a, = 1, self.assertEqual(a, 1) @@ -1529,7 +1685,7 @@ def unpack_sequence_tuple(): self.assert_no_opcode(unpack_sequence_tuple, "UNPACK_SEQUENCE") def unpack_sequence_list(): - for _ in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): a, b = [1, 2] self.assertEqual(a, 1) self.assertEqual(b, 2) @@ -1542,7 +1698,7 @@ def unpack_sequence_list(): @requires_specialization_ft def test_binary_subscr(self): def binary_subscr_list_int(): - for _ in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): a = [1, 2, 3] for idx, expected in enumerate(a): self.assertEqual(a[idx], expected) @@ -1553,7 +1709,7 @@ def binary_subscr_list_int(): self.assert_no_opcode(binary_subscr_list_int, "BINARY_SUBSCR") def 
binary_subscr_tuple_int(): - for _ in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): a = (1, 2, 3) for idx, expected in enumerate(a): self.assertEqual(a[idx], expected) @@ -1564,7 +1720,7 @@ def binary_subscr_tuple_int(): self.assert_no_opcode(binary_subscr_tuple_int, "BINARY_SUBSCR") def binary_subscr_dict(): - for _ in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): a = {1: 2, 2: 3} self.assertEqual(a[1], 2) self.assertEqual(a[2], 3) @@ -1574,7 +1730,7 @@ def binary_subscr_dict(): self.assert_no_opcode(binary_subscr_dict, "BINARY_SUBSCR") def binary_subscr_str_int(): - for _ in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): a = "foobar" for idx, expected in enumerate(a): self.assertEqual(a[idx], expected) @@ -1590,14 +1746,61 @@ def __init__(self, val): def __getitem__(self, item): return self.val - items = [C(i) for i in range(100)] - for i in range(100): + items = [C(i) for i in range(_testinternalcapi.SPECIALIZATION_THRESHOLD)] + for i in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): self.assertEqual(items[i][i], i) binary_subscr_getitems() self.assert_specialized(binary_subscr_getitems, "BINARY_SUBSCR_GETITEM") self.assert_no_opcode(binary_subscr_getitems, "BINARY_SUBSCR") + @cpython_only + @requires_specialization_ft + def test_compare_op(self): + def compare_op_int(): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): + a, b = 1, 2 + c = a == b + self.assertFalse(c) + + compare_op_int() + self.assert_specialized(compare_op_int, "COMPARE_OP_INT") + self.assert_no_opcode(compare_op_int, "COMPARE_OP") + + def compare_op_float(): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): + a, b = 1.0, 2.0 + c = a == b + self.assertFalse(c) + + compare_op_float() + self.assert_specialized(compare_op_float, "COMPARE_OP_FLOAT") + self.assert_no_opcode(compare_op_float, "COMPARE_OP") + + def compare_op_str(): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): + a, b = "spam", "ham" + c = a == b + self.assertFalse(c) + + compare_op_str() + self.assert_specialized(compare_op_str, "COMPARE_OP_STR") + self.assert_no_opcode(compare_op_str, "COMPARE_OP") + + @cpython_only + @requires_specialization_ft + def test_load_const(self): + def load_const(): + def unused(): pass + # Currently, the empty tuple is immortal, and the otherwise + # unused nested function's code object is mortal. This test will + # have to use different values if either of that changes. + return () + + load_const() + self.assert_specialized(load_const, "LOAD_CONST_IMMORTAL") + self.assert_specialized(load_const, "LOAD_CONST_MORTAL") + self.assert_no_opcode(load_const, "LOAD_CONST") if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py index d688a225538c11..6e40cb4f58bfee 100644 --- a/Lib/test/test_os.py +++ b/Lib/test/test_os.py @@ -230,6 +230,94 @@ def test_read(self): self.assertEqual(type(s), bytes) self.assertEqual(s, b"spam") + def test_readinto(self): + with open(os_helper.TESTFN, "w+b") as fobj: + fobj.write(b"spam") + fobj.flush() + fd = fobj.fileno() + os.lseek(fd, 0, 0) + # Oversized so readinto without hitting end. + buffer = bytearray(7) + s = os.readinto(fd, buffer) + self.assertEqual(type(s), int) + self.assertEqual(s, 4) + # Should overwrite the first 4 bytes of the buffer. + self.assertEqual(buffer[:4], b"spam") + + # Readinto at EOF should return 0 and not touch buffer. 
+ buffer[:] = b"notspam" + s = os.readinto(fd, buffer) + self.assertEqual(type(s), int) + self.assertEqual(s, 0) + self.assertEqual(bytes(buffer), b"notspam") + s = os.readinto(fd, buffer) + self.assertEqual(s, 0) + self.assertEqual(bytes(buffer), b"notspam") + + # Readinto a 0 length bytearray when at EOF should return 0 + self.assertEqual(os.readinto(fd, bytearray()), 0) + + # Readinto a 0 length bytearray with data available should return 0. + os.lseek(fd, 0, 0) + self.assertEqual(os.readinto(fd, bytearray()), 0) + + @unittest.skipUnless(hasattr(os, 'get_blocking'), + 'needs os.get_blocking() and os.set_blocking()') + @unittest.skipUnless(hasattr(os, "pipe"), "requires os.pipe()") + @unittest.skipIf(support.is_emscripten, "set_blocking does not work correctly") + def test_readinto_non_blocking(self): + # Verify behavior of a readinto which would block on a non-blocking fd. + r, w = os.pipe() + try: + os.set_blocking(r, False) + with self.assertRaises(BlockingIOError): + os.readinto(r, bytearray(5)) + + # Pass some data through + os.write(w, b"spam") + self.assertEqual(os.readinto(r, bytearray(4)), 4) + + # Still don't block or return 0. + with self.assertRaises(BlockingIOError): + os.readinto(r, bytearray(5)) + + # At EOF should return size 0 + os.close(w) + w = None + self.assertEqual(os.readinto(r, bytearray(5)), 0) + self.assertEqual(os.readinto(r, bytearray(5)), 0) # Still EOF + + finally: + os.close(r) + if w is not None: + os.close(w) + + def test_readinto_badarg(self): + with open(os_helper.TESTFN, "w+b") as fobj: + fobj.write(b"spam") + fobj.flush() + fd = fobj.fileno() + os.lseek(fd, 0, 0) + + for bad_arg in ("test", bytes(), 14): + with self.subTest(f"bad buffer {type(bad_arg)}"): + with self.assertRaises(TypeError): + os.readinto(fd, bad_arg) + + with self.subTest("doesn't work on file objects"): + with self.assertRaises(TypeError): + os.readinto(fobj, bytearray(5)) + + # takes two args + with self.assertRaises(TypeError): + os.readinto(fd) + + # No data should have been read with the bad arguments. + buffer = bytearray(4) + s = os.readinto(fd, buffer) + self.assertEqual(s, 4) + self.assertEqual(buffer, b"spam") + @support.cpython_only # Skip the test on 32-bit platforms: the number of bytes must fit in a # Py_ssize_t type @@ -249,6 +337,29 @@ def test_large_read(self, size): # operating system is free to return less bytes than requested. self.assertEqual(data, b'test') + + @support.cpython_only + # Skip the test on 32-bit platforms: the number of bytes must fit in a + # Py_ssize_t type + @unittest.skipUnless(INT_MAX < PY_SSIZE_T_MAX, + "needs INT_MAX < PY_SSIZE_T_MAX") + @support.bigmemtest(size=INT_MAX + 10, memuse=1, dry_run=False) + def test_large_readinto(self, size): + self.addCleanup(os_helper.unlink, os_helper.TESTFN) + create_file(os_helper.TESTFN, b'test') + + # Issue #21932: For readinto the buffer contains the length rather than + # a length being passed explicitly to read, should still get capped to a + # valid size / not raise an OverflowError for sizes larger than INT_MAX. + buffer = bytearray(INT_MAX + 10) + with open(os_helper.TESTFN, "rb") as fp: + length = os.readinto(fp.fileno(), buffer) + + # The test does not try to read more than 2 GiB at once because the + # operating system is free to return less bytes than requested. 
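The os.readinto() tests around this point exercise reading directly into a caller-supplied buffer. A minimal usage sketch, assuming the os.readinto() function these tests introduce:

    import os
    import tempfile

    with tempfile.TemporaryFile() as f:
        f.write(b"spam")
        f.flush()
        fd = f.fileno()
        os.lseek(fd, 0, os.SEEK_SET)

        buf = bytearray(7)                 # deliberately larger than the data
        n = os.readinto(fd, buf)           # returns the number of bytes read
        assert n == 4 and buf[:4] == b"spam"
        assert os.readinto(fd, buf) == 0   # at EOF: returns 0, buffer untouched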
+ self.assertEqual(length, 4) + self.assertEqual(buffer[:4], b'test') + def test_write(self): # os.write() accepts bytes- and buffer-like objects but not strings fd = os.open(os_helper.TESTFN, os.O_CREAT | os.O_WRONLY) @@ -2467,6 +2578,10 @@ def test_lseek(self): def test_read(self): self.check(os.read, 1) + @unittest.skipUnless(hasattr(os, 'readinto'), 'test needs os.readinto()') + def test_readinto(self): + self.check(os.readinto, bytearray(5)) + @unittest.skipUnless(hasattr(os, 'readv'), 'test needs os.readv()') def test_readv(self): buf = bytearray(10) diff --git a/Lib/test/test_pathlib/test_pathlib.py b/Lib/test/test_pathlib/test_pathlib.py index 6548577f4de12c..d64092b710a4d6 100644 --- a/Lib/test/test_pathlib/test_pathlib.py +++ b/Lib/test/test_pathlib/test_pathlib.py @@ -75,7 +75,7 @@ def test_is_notimplemented(self): # Tests for the pure classes. # -class PurePathTest(test_pathlib_abc.DummyPurePathTest): +class PurePathTest(test_pathlib_abc.DummyJoinablePathTest): cls = pathlib.PurePath # Make sure any symbolic links in the base test path are resolved. @@ -438,6 +438,84 @@ def test_match_empty(self): self.assertRaises(ValueError, P('a').match, '') self.assertRaises(ValueError, P('a').match, '.') + def test_match_common(self): + P = self.cls + # Simple relative pattern. + self.assertTrue(P('b.py').match('b.py')) + self.assertTrue(P('a/b.py').match('b.py')) + self.assertTrue(P('/a/b.py').match('b.py')) + self.assertFalse(P('a.py').match('b.py')) + self.assertFalse(P('b/py').match('b.py')) + self.assertFalse(P('/a.py').match('b.py')) + self.assertFalse(P('b.py/c').match('b.py')) + # Wildcard relative pattern. + self.assertTrue(P('b.py').match('*.py')) + self.assertTrue(P('a/b.py').match('*.py')) + self.assertTrue(P('/a/b.py').match('*.py')) + self.assertFalse(P('b.pyc').match('*.py')) + self.assertFalse(P('b./py').match('*.py')) + self.assertFalse(P('b.py/c').match('*.py')) + # Multi-part relative pattern. + self.assertTrue(P('ab/c.py').match('a*/*.py')) + self.assertTrue(P('/d/ab/c.py').match('a*/*.py')) + self.assertFalse(P('a.py').match('a*/*.py')) + self.assertFalse(P('/dab/c.py').match('a*/*.py')) + self.assertFalse(P('ab/c.py/d').match('a*/*.py')) + # Absolute pattern. + self.assertTrue(P('/b.py').match('/*.py')) + self.assertFalse(P('b.py').match('/*.py')) + self.assertFalse(P('a/b.py').match('/*.py')) + self.assertFalse(P('/a/b.py').match('/*.py')) + # Multi-part absolute pattern. + self.assertTrue(P('/a/b.py').match('/a/*.py')) + self.assertFalse(P('/ab.py').match('/a/*.py')) + self.assertFalse(P('/a/b/c.py').match('/a/*.py')) + # Multi-part glob-style pattern. + self.assertFalse(P('/a/b/c.py').match('/**/*.py')) + self.assertTrue(P('/a/b/c.py').match('/a/**/*.py')) + # Case-sensitive flag + self.assertFalse(P('A.py').match('a.PY', case_sensitive=True)) + self.assertTrue(P('A.py').match('a.PY', case_sensitive=False)) + self.assertFalse(P('c:/a/B.Py').match('C:/A/*.pY', case_sensitive=True)) + self.assertTrue(P('/a/b/c.py').match('/A/*/*.Py', case_sensitive=False)) + # Matching against empty path + self.assertFalse(P('').match('*')) + self.assertFalse(P('').match('**')) + self.assertFalse(P('').match('**/*')) + + @needs_posix + def test_match_posix(self): + P = self.cls + self.assertFalse(P('A.py').match('a.PY')) + + @needs_windows + def test_match_windows(self): + P = self.cls + # Absolute patterns. 
+ self.assertTrue(P('c:/b.py').match('*:/*.py')) + self.assertTrue(P('c:/b.py').match('c:/*.py')) + self.assertFalse(P('d:/b.py').match('c:/*.py')) # wrong drive + self.assertFalse(P('b.py').match('/*.py')) + self.assertFalse(P('b.py').match('c:*.py')) + self.assertFalse(P('b.py').match('c:/*.py')) + self.assertFalse(P('c:b.py').match('/*.py')) + self.assertFalse(P('c:b.py').match('c:/*.py')) + self.assertFalse(P('/b.py').match('c:*.py')) + self.assertFalse(P('/b.py').match('c:/*.py')) + # UNC patterns. + self.assertTrue(P('//some/share/a.py').match('//*/*/*.py')) + self.assertTrue(P('//some/share/a.py').match('//some/share/*.py')) + self.assertFalse(P('//other/share/a.py').match('//some/share/*.py')) + self.assertFalse(P('//some/share/a/b.py').match('//some/share/*.py')) + # Case-insensitivity. + self.assertTrue(P('B.py').match('b.PY')) + self.assertTrue(P('c:/a/B.Py').match('C:/A/*.pY')) + self.assertTrue(P('//Some/Share/B.Py').match('//somE/sharE/*.pY')) + # Path anchor doesn't match pattern anchor + self.assertFalse(P('c:/b.py').match('/*.py')) # 'c:/' vs '/' + self.assertFalse(P('c:/b.py').match('c:*.py')) # 'c:/' vs 'c:' + self.assertFalse(P('//some/share/a.py').match('/*.py')) # '//some/share/' vs '/' + @needs_posix def test_parse_path_posix(self): check = self._check_parse_path @@ -924,7 +1002,7 @@ class cls(pathlib.PurePath): # Tests for the concrete classes. # -class PathTest(test_pathlib_abc.DummyPathTest, PurePathTest): +class PathTest(test_pathlib_abc.DummyRWPathTest, PurePathTest): """Tests for the FS-accessing functionalities of the Path classes.""" cls = pathlib.Path can_symlink = os_helper.can_symlink() @@ -980,15 +1058,15 @@ def tempdir(self): self.addCleanup(os_helper.rmtree, d) return d - def test_matches_pathbase_docstrings(self): - path_names = {name for name in dir(pathlib._abc.PathBase) if name[0] != '_'} + def test_matches_writablepath_docstrings(self): + path_names = {name for name in dir(pathlib._abc.WritablePath) if name[0] != '_'} for attr_name in path_names: if attr_name == 'parser': - # On Windows, Path.parser is ntpath, but PathBase.parser is + # On Windows, Path.parser is ntpath, but WritablePath.parser is # posixpath, and so their docstrings differ. 
continue our_attr = getattr(self.cls, attr_name) - path_attr = getattr(pathlib._abc.PathBase, attr_name) + path_attr = getattr(pathlib._abc.WritablePath, attr_name) self.assertEqual(our_attr.__doc__, path_attr.__doc__) def test_concrete_class(self): @@ -1102,6 +1180,15 @@ def with_segments(self, *pathsegments): for dirpath, dirnames, filenames in p.walk(): self.assertEqual(42, dirpath.session_id) + def test_open_common(self): + p = self.cls(self.base) + with (p / 'fileA').open('r') as f: + self.assertIsInstance(f, io.TextIOBase) + self.assertEqual(f.read(), "this is file A\n") + with (p / 'fileA').open('rb') as f: + self.assertIsInstance(f, io.BufferedIOBase) + self.assertEqual(f.read().strip(), b"this is file A") + def test_open_unbuffered(self): p = self.cls(self.base) with (p / 'fileA').open('rb', buffering=0) as f: @@ -3019,7 +3106,7 @@ def test_group_windows(self): P('c:/').group() -class PathWalkTest(test_pathlib_abc.DummyPathWalkTest): +class PathWalkTest(test_pathlib_abc.DummyReadablePathWalkTest): cls = pathlib.Path base = PathTest.base can_symlink = PathTest.can_symlink diff --git a/Lib/test/test_pathlib/test_pathlib_abc.py b/Lib/test/test_pathlib/test_pathlib_abc.py index 87aef0c130cf9e..e67bead4297829 100644 --- a/Lib/test/test_pathlib/test_pathlib_abc.py +++ b/Lib/test/test_pathlib/test_pathlib_abc.py @@ -4,7 +4,7 @@ import errno import unittest -from pathlib._abc import PurePathBase, PathBase +from pathlib._abc import JoinablePath, ReadablePath, WritablePath, magic_open from pathlib._types import Parser import posixpath @@ -31,8 +31,8 @@ def needs_windows(fn): # -class PurePathBaseTest(unittest.TestCase): - cls = PurePathBase +class JoinablePathTest(unittest.TestCase): + cls = JoinablePath def test_magic_methods(self): P = self.cls @@ -51,7 +51,7 @@ def test_parser(self): self.assertIs(self.cls.parser, posixpath) -class DummyPurePath(PurePathBase): +class DummyJoinablePath(JoinablePath): __slots__ = ('_segments',) def __init__(self, *segments): @@ -63,7 +63,7 @@ def __str__(self): return '' def __eq__(self, other): - if not isinstance(other, DummyPurePath): + if not isinstance(other, DummyJoinablePath): return NotImplemented return str(self) == str(other) @@ -77,8 +77,8 @@ def with_segments(self, *pathsegments): return type(self)(*pathsegments) -class DummyPurePathTest(unittest.TestCase): - cls = DummyPurePath +class DummyJoinablePathTest(unittest.TestCase): + cls = DummyJoinablePath # Use a base path that's unrelated to any real filesystem path. base = f'/this/path/kills/fascists/{TESTFN}' @@ -296,88 +296,6 @@ def test_str_windows(self): p = self.cls('//a/b/c/d') self.assertEqual(str(p), '\\\\a\\b\\c\\d') - def test_match_empty(self): - P = self.cls - self.assertRaises(ValueError, P('a').match, '') - - def test_match_common(self): - P = self.cls - # Simple relative pattern. - self.assertTrue(P('b.py').match('b.py')) - self.assertTrue(P('a/b.py').match('b.py')) - self.assertTrue(P('/a/b.py').match('b.py')) - self.assertFalse(P('a.py').match('b.py')) - self.assertFalse(P('b/py').match('b.py')) - self.assertFalse(P('/a.py').match('b.py')) - self.assertFalse(P('b.py/c').match('b.py')) - # Wildcard relative pattern. - self.assertTrue(P('b.py').match('*.py')) - self.assertTrue(P('a/b.py').match('*.py')) - self.assertTrue(P('/a/b.py').match('*.py')) - self.assertFalse(P('b.pyc').match('*.py')) - self.assertFalse(P('b./py').match('*.py')) - self.assertFalse(P('b.py/c').match('*.py')) - # Multi-part relative pattern. 
- self.assertTrue(P('ab/c.py').match('a*/*.py')) - self.assertTrue(P('/d/ab/c.py').match('a*/*.py')) - self.assertFalse(P('a.py').match('a*/*.py')) - self.assertFalse(P('/dab/c.py').match('a*/*.py')) - self.assertFalse(P('ab/c.py/d').match('a*/*.py')) - # Absolute pattern. - self.assertTrue(P('/b.py').match('/*.py')) - self.assertFalse(P('b.py').match('/*.py')) - self.assertFalse(P('a/b.py').match('/*.py')) - self.assertFalse(P('/a/b.py').match('/*.py')) - # Multi-part absolute pattern. - self.assertTrue(P('/a/b.py').match('/a/*.py')) - self.assertFalse(P('/ab.py').match('/a/*.py')) - self.assertFalse(P('/a/b/c.py').match('/a/*.py')) - # Multi-part glob-style pattern. - self.assertFalse(P('/a/b/c.py').match('/**/*.py')) - self.assertTrue(P('/a/b/c.py').match('/a/**/*.py')) - # Case-sensitive flag - self.assertFalse(P('A.py').match('a.PY', case_sensitive=True)) - self.assertTrue(P('A.py').match('a.PY', case_sensitive=False)) - self.assertFalse(P('c:/a/B.Py').match('C:/A/*.pY', case_sensitive=True)) - self.assertTrue(P('/a/b/c.py').match('/A/*/*.Py', case_sensitive=False)) - # Matching against empty path - self.assertFalse(P('').match('*')) - self.assertFalse(P('').match('**')) - self.assertFalse(P('').match('**/*')) - - @needs_posix - def test_match_posix(self): - P = self.cls - self.assertFalse(P('A.py').match('a.PY')) - - @needs_windows - def test_match_windows(self): - P = self.cls - # Absolute patterns. - self.assertTrue(P('c:/b.py').match('*:/*.py')) - self.assertTrue(P('c:/b.py').match('c:/*.py')) - self.assertFalse(P('d:/b.py').match('c:/*.py')) # wrong drive - self.assertFalse(P('b.py').match('/*.py')) - self.assertFalse(P('b.py').match('c:*.py')) - self.assertFalse(P('b.py').match('c:/*.py')) - self.assertFalse(P('c:b.py').match('/*.py')) - self.assertFalse(P('c:b.py').match('c:/*.py')) - self.assertFalse(P('/b.py').match('c:*.py')) - self.assertFalse(P('/b.py').match('c:/*.py')) - # UNC patterns. - self.assertTrue(P('//some/share/a.py').match('//*/*/*.py')) - self.assertTrue(P('//some/share/a.py').match('//some/share/*.py')) - self.assertFalse(P('//other/share/a.py').match('//some/share/*.py')) - self.assertFalse(P('//some/share/a/b.py').match('//some/share/*.py')) - # Case-insensitivity. - self.assertTrue(P('B.py').match('b.PY')) - self.assertTrue(P('c:/a/B.Py').match('C:/A/*.pY')) - self.assertTrue(P('//Some/Share/B.Py').match('//somE/sharE/*.pY')) - # Path anchor doesn't match pattern anchor - self.assertFalse(P('c:/b.py').match('/*.py')) # 'c:/' vs '/' - self.assertFalse(P('c:/b.py').match('c:*.py')) # 'c:/' vs 'c:' - self.assertFalse(P('//some/share/a.py').match('/*.py')) # '//some/share/' vs '/' - def test_full_match_common(self): P = self.cls # Simple relative pattern. @@ -916,9 +834,9 @@ def test_with_suffix_invalid(self): # -class DummyPathIO(io.BytesIO): +class DummyWritablePathIO(io.BytesIO): """ - Used by DummyPath to implement `open('w')` + Used by DummyWritablePath to implement `__open_wb__()` """ def __init__(self, files, path): @@ -931,38 +849,16 @@ def close(self): super().close() -class DummyPath(PathBase): +class DummyReadablePath(ReadablePath, DummyJoinablePath): """ - Simple implementation of PathBase that keeps files and directories in - memory. + Simple implementation of DummyReadablePath that keeps files and + directories in memory. 
""" - __slots__ = ('_segments') + __slots__ = () _files = {} _directories = {} - def __init__(self, *segments): - self._segments = segments - - def __str__(self): - if self._segments: - return self.parser.join(*self._segments) - return '' - - def __eq__(self, other): - if not isinstance(other, DummyPath): - return NotImplemented - return str(self) == str(other) - - def __hash__(self): - return hash(str(self)) - - def __repr__(self): - return "{}({!r})".format(self.__class__.__name__, str(self)) - - def with_segments(self, *pathsegments): - return type(self)(*pathsegments) - def exists(self, *, follow_symlinks=True): return self.is_dir() or self.is_file() @@ -975,32 +871,13 @@ def is_file(self, *, follow_symlinks=True): def is_symlink(self): return False - def open(self, mode='r', buffering=-1, encoding=None, - errors=None, newline=None): - if buffering != -1 and not (buffering == 0 and 'b' in mode): - raise NotImplementedError + def __open_rb__(self, buffering=-1): path = str(self) if path in self._directories: raise IsADirectoryError(errno.EISDIR, "Is a directory", path) - - text = 'b' not in mode - mode = ''.join(c for c in mode if c not in 'btU') - if mode == 'r': - if path not in self._files: - raise FileNotFoundError(errno.ENOENT, "File not found", path) - stream = io.BytesIO(self._files[path]) - elif mode == 'w': - parent, name = posixpath.split(path) - if parent not in self._directories: - raise FileNotFoundError(errno.ENOENT, "File not found", parent) - stream = DummyPathIO(self._files, path) - self._files[path] = b'' - self._directories[parent].add(name) - else: - raise NotImplementedError - if text: - stream = io.TextIOWrapper(stream, encoding=encoding, errors=errors, newline=newline) - return stream + elif path not in self._files: + raise FileNotFoundError(errno.ENOENT, "File not found", path) + return io.BytesIO(self._files[path]) def iterdir(self): path = str(self).rstrip('/') @@ -1011,6 +888,21 @@ def iterdir(self): else: raise FileNotFoundError(errno.ENOENT, "File not found", path) + +class DummyWritablePath(WritablePath, DummyJoinablePath): + __slots__ = () + + def __open_wb__(self, buffering=-1): + path = str(self) + if path in self._directories: + raise IsADirectoryError(errno.EISDIR, "Is a directory", path) + parent, name = posixpath.split(path) + if parent not in self._directories: + raise FileNotFoundError(errno.ENOENT, "File not found", parent) + self._files[path] = b'' + self._directories[parent].add(name) + return DummyWritablePathIO(self._files, path) + def mkdir(self, mode=0o777, parents=False, exist_ok=False): path = str(self) parent = str(self.parent) @@ -1029,24 +921,11 @@ def mkdir(self, mode=0o777, parents=False, exist_ok=False): self.parent.mkdir(parents=True, exist_ok=True) self.mkdir(mode, parents=False, exist_ok=exist_ok) - def _delete(self): - path = str(self) - if path in self._files: - del self._files[path] - elif path in self._directories: - for name in list(self._directories[path]): - self.joinpath(name)._delete() - del self._directories[path] - else: - raise FileNotFoundError(errno.ENOENT, "File not found", path) - parent = str(self.parent) - self._directories[parent].remove(self.name) - -class DummyPathTest(DummyPurePathTest): - """Tests for PathBase methods that use stat(), open() and iterdir().""" +class DummyReadablePathTest(DummyJoinablePathTest): + """Tests for ReadablePathTest methods that use stat(), open() and iterdir().""" - cls = DummyPath + cls = DummyReadablePath can_symlink = False # (self.base) @@ -1129,222 +1008,15 @@ def 
test_exists(self): self.assertIs(False, P(self.base + '\udfff').exists()) self.assertIs(False, P(self.base + '\x00').exists()) - def test_open_common(self): + def test_magic_open(self): p = self.cls(self.base) - with (p / 'fileA').open('r') as f: + with magic_open(p / 'fileA', 'r') as f: self.assertIsInstance(f, io.TextIOBase) self.assertEqual(f.read(), "this is file A\n") - with (p / 'fileA').open('rb') as f: + with magic_open(p / 'fileA', 'rb') as f: self.assertIsInstance(f, io.BufferedIOBase) self.assertEqual(f.read().strip(), b"this is file A") - def test_read_write_bytes(self): - p = self.cls(self.base) - (p / 'fileA').write_bytes(b'abcdefg') - self.assertEqual((p / 'fileA').read_bytes(), b'abcdefg') - # Check that trying to write str does not truncate the file. - self.assertRaises(TypeError, (p / 'fileA').write_bytes, 'somestr') - self.assertEqual((p / 'fileA').read_bytes(), b'abcdefg') - - def test_read_write_text(self): - p = self.cls(self.base) - (p / 'fileA').write_text('äbcdefg', encoding='latin-1') - self.assertEqual((p / 'fileA').read_text( - encoding='utf-8', errors='ignore'), 'bcdefg') - # Check that trying to write bytes does not truncate the file. - self.assertRaises(TypeError, (p / 'fileA').write_text, b'somebytes') - self.assertEqual((p / 'fileA').read_text(encoding='latin-1'), 'äbcdefg') - - def test_read_text_with_newlines(self): - p = self.cls(self.base) - # Check that `\n` character change nothing - (p / 'fileA').write_bytes(b'abcde\r\nfghlk\n\rmnopq') - self.assertEqual((p / 'fileA').read_text(newline='\n'), - 'abcde\r\nfghlk\n\rmnopq') - # Check that `\r` character replaces `\n` - (p / 'fileA').write_bytes(b'abcde\r\nfghlk\n\rmnopq') - self.assertEqual((p / 'fileA').read_text(newline='\r'), - 'abcde\r\nfghlk\n\rmnopq') - # Check that `\r\n` character replaces `\n` - (p / 'fileA').write_bytes(b'abcde\r\nfghlk\n\rmnopq') - self.assertEqual((p / 'fileA').read_text(newline='\r\n'), - 'abcde\r\nfghlk\n\rmnopq') - - def test_write_text_with_newlines(self): - p = self.cls(self.base) - # Check that `\n` character change nothing - (p / 'fileA').write_text('abcde\r\nfghlk\n\rmnopq', newline='\n') - self.assertEqual((p / 'fileA').read_bytes(), - b'abcde\r\nfghlk\n\rmnopq') - # Check that `\r` character replaces `\n` - (p / 'fileA').write_text('abcde\r\nfghlk\n\rmnopq', newline='\r') - self.assertEqual((p / 'fileA').read_bytes(), - b'abcde\r\rfghlk\r\rmnopq') - # Check that `\r\n` character replaces `\n` - (p / 'fileA').write_text('abcde\r\nfghlk\n\rmnopq', newline='\r\n') - self.assertEqual((p / 'fileA').read_bytes(), - b'abcde\r\r\nfghlk\r\n\rmnopq') - # Check that no argument passed will change `\n` to `os.linesep` - os_linesep_byte = bytes(os.linesep, encoding='ascii') - (p / 'fileA').write_text('abcde\nfghlk\n\rmnopq') - self.assertEqual((p / 'fileA').read_bytes(), - b'abcde' + os_linesep_byte + b'fghlk' + os_linesep_byte + b'\rmnopq') - - def test_copy_file(self): - base = self.cls(self.base) - source = base / 'fileA' - target = base / 'copyA' - result = source.copy(target) - self.assertEqual(result, target) - self.assertTrue(target.exists()) - self.assertEqual(source.read_text(), target.read_text()) - - def test_copy_file_to_existing_file(self): - base = self.cls(self.base) - source = base / 'fileA' - target = base / 'dirB' / 'fileB' - result = source.copy(target) - self.assertEqual(result, target) - self.assertTrue(target.exists()) - self.assertEqual(source.read_text(), target.read_text()) - - def test_copy_file_to_existing_directory(self): - base = self.cls(self.base) 
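The test_magic_open() case above expects 'r' mode to produce a text stream and 'rb' a buffered binary one. The layering involved can be sketched with the public io module alone (pathlib._abc.magic_open itself is private and may change):

import io

raw = io.BytesIO(b"this is file A\n")            # stands in for a __open_rb__() result
text = io.TextIOWrapper(raw, encoding="utf-8")   # what an 'r'-mode open wraps it in
assert isinstance(text, io.TextIOBase)
assert text.read() == "this is file A\n"

raw = io.BytesIO(b"this is file A\n")
assert isinstance(raw, io.BufferedIOBase)        # 'rb' mode hands back the binary stream
assert raw.read().strip() == b"this is file A"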
- source = base / 'fileA' - target = base / 'dirA' - self.assertRaises(OSError, source.copy, target) - - def test_copy_file_empty(self): - base = self.cls(self.base) - source = base / 'empty' - target = base / 'copyA' - source.write_bytes(b'') - result = source.copy(target) - self.assertEqual(result, target) - self.assertTrue(target.exists()) - self.assertEqual(target.read_bytes(), b'') - - def test_copy_file_to_itself(self): - base = self.cls(self.base) - source = base / 'empty' - source.write_bytes(b'') - self.assertRaises(OSError, source.copy, source) - self.assertRaises(OSError, source.copy, source, follow_symlinks=False) - - def test_copy_dir_simple(self): - base = self.cls(self.base) - source = base / 'dirC' - target = base / 'copyC' - result = source.copy(target) - self.assertEqual(result, target) - self.assertTrue(target.is_dir()) - self.assertTrue(target.joinpath('dirD').is_dir()) - self.assertTrue(target.joinpath('dirD', 'fileD').is_file()) - self.assertEqual(target.joinpath('dirD', 'fileD').read_text(), - "this is file D\n") - self.assertTrue(target.joinpath('fileC').is_file()) - self.assertTrue(target.joinpath('fileC').read_text(), - "this is file C\n") - - def test_copy_dir_complex(self, follow_symlinks=True): - def ordered_walk(path): - for dirpath, dirnames, filenames in path.walk(follow_symlinks=follow_symlinks): - dirnames.sort() - filenames.sort() - yield dirpath, dirnames, filenames - base = self.cls(self.base) - source = base / 'dirC' - - if self.can_symlink: - # Add some symlinks - source.joinpath('linkC').symlink_to('fileC') - source.joinpath('linkD').symlink_to('dirD', target_is_directory=True) - - # Perform the copy - target = base / 'copyC' - result = source.copy(target, follow_symlinks=follow_symlinks) - self.assertEqual(result, target) - - # Compare the source and target trees - source_walk = ordered_walk(source) - target_walk = ordered_walk(target) - for source_item, target_item in zip(source_walk, target_walk, strict=True): - self.assertEqual(source_item[0].parts[len(source.parts):], - target_item[0].parts[len(target.parts):]) # dirpath - self.assertEqual(source_item[1], target_item[1]) # dirnames - self.assertEqual(source_item[2], target_item[2]) # filenames - # Compare files and symlinks - for filename in source_item[2]: - source_file = source_item[0].joinpath(filename) - target_file = target_item[0].joinpath(filename) - if follow_symlinks or not source_file.is_symlink(): - # Regular file. - self.assertEqual(source_file.read_bytes(), target_file.read_bytes()) - elif source_file.is_dir(): - # Symlink to directory. - self.assertTrue(target_file.is_dir()) - self.assertEqual(source_file.readlink(), target_file.readlink()) - else: - # Symlink to file. 
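The ordered_walk() helper used by the copy tests sorts dirnames and filenames in place so that two trees can be compared entry by entry. The same idiom with the concrete pathlib.Path, as a small sketch:

from pathlib import Path

def ordered_walk(root):
    # walk() is top-down, so sorting dirnames in place also fixes the descent order.
    for dirpath, dirnames, filenames in root.walk():
        dirnames.sort()
        filenames.sort()
        yield dirpath, dirnames, filenames

for dirpath, dirnames, filenames in ordered_walk(Path('.')):
    print(dirpath, dirnames, filenames)
    break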
- self.assertEqual(source_file.read_bytes(), target_file.read_bytes()) - self.assertEqual(source_file.readlink(), target_file.readlink()) - - def test_copy_dir_complex_follow_symlinks_false(self): - self.test_copy_dir_complex(follow_symlinks=False) - - def test_copy_dir_to_existing_directory(self): - base = self.cls(self.base) - source = base / 'dirC' - target = base / 'copyC' - target.mkdir() - target.joinpath('dirD').mkdir() - self.assertRaises(FileExistsError, source.copy, target) - - def test_copy_dir_to_existing_directory_dirs_exist_ok(self): - base = self.cls(self.base) - source = base / 'dirC' - target = base / 'copyC' - target.mkdir() - target.joinpath('dirD').mkdir() - result = source.copy(target, dirs_exist_ok=True) - self.assertEqual(result, target) - self.assertTrue(target.is_dir()) - self.assertTrue(target.joinpath('dirD').is_dir()) - self.assertTrue(target.joinpath('dirD', 'fileD').is_file()) - self.assertEqual(target.joinpath('dirD', 'fileD').read_text(), - "this is file D\n") - self.assertTrue(target.joinpath('fileC').is_file()) - self.assertTrue(target.joinpath('fileC').read_text(), - "this is file C\n") - - def test_copy_dir_to_itself(self): - base = self.cls(self.base) - source = base / 'dirC' - self.assertRaises(OSError, source.copy, source) - self.assertRaises(OSError, source.copy, source, follow_symlinks=False) - - def test_copy_dir_into_itself(self): - base = self.cls(self.base) - source = base / 'dirC' - target = base / 'dirC' / 'dirD' / 'copyC' - self.assertRaises(OSError, source.copy, target) - self.assertRaises(OSError, source.copy, target, follow_symlinks=False) - self.assertFalse(target.exists()) - - def test_copy_into(self): - base = self.cls(self.base) - source = base / 'fileA' - target_dir = base / 'dirA' - result = source.copy_into(target_dir) - self.assertEqual(result, target_dir / 'fileA') - self.assertTrue(result.exists()) - self.assertEqual(source.read_text(), result.read_text()) - - def test_copy_into_empty_name(self): - source = self.cls('') - target_dir = self.base - self.assertRaises(ValueError, source.copy_into, target_dir) - def test_iterdir(self): P = self.cls p = P(self.base) @@ -1574,9 +1246,229 @@ def test_is_symlink(self): self.assertIs((P / 'linkA\x00').is_file(), False) -class DummyPathWalkTest(unittest.TestCase): - cls = DummyPath - base = DummyPathTest.base +class DummyWritablePathTest(DummyJoinablePathTest): + cls = DummyWritablePath + + +class DummyRWPath(DummyWritablePath, DummyReadablePath): + __slots__ = () + + +class DummyRWPathTest(DummyWritablePathTest, DummyReadablePathTest): + cls = DummyRWPath + can_symlink = False + + def test_read_write_bytes(self): + p = self.cls(self.base) + (p / 'fileA').write_bytes(b'abcdefg') + self.assertEqual((p / 'fileA').read_bytes(), b'abcdefg') + # Check that trying to write str does not truncate the file. + self.assertRaises(TypeError, (p / 'fileA').write_bytes, 'somestr') + self.assertEqual((p / 'fileA').read_bytes(), b'abcdefg') + + def test_read_write_text(self): + p = self.cls(self.base) + (p / 'fileA').write_text('äbcdefg', encoding='latin-1') + self.assertEqual((p / 'fileA').read_text( + encoding='utf-8', errors='ignore'), 'bcdefg') + # Check that trying to write bytes does not truncate the file. 
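The read/write tests being consolidated here depend on a plain encoding round trip: text written as Latin-1 and read back as UTF-8 with errors='ignore' silently drops the byte that is not valid UTF-8. A small sketch against a real file (the scratch path is hypothetical):

import tempfile
from pathlib import Path

target = Path(tempfile.mkdtemp()) / "fileA"
target.write_text("äbcdefg", encoding="latin-1")
# 0xE4 (Latin-1 'ä') is not a valid UTF-8 sequence, so it is skipped on read.
assert target.read_text(encoding="utf-8", errors="ignore") == "bcdefg"
assert target.read_text(encoding="latin-1") == "äbcdefg"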
+ self.assertRaises(TypeError, (p / 'fileA').write_text, b'somebytes') + self.assertEqual((p / 'fileA').read_text(encoding='latin-1'), 'äbcdefg') + + def test_read_text_with_newlines(self): + p = self.cls(self.base) + # Check that `\n` character change nothing + (p / 'fileA').write_bytes(b'abcde\r\nfghlk\n\rmnopq') + self.assertEqual((p / 'fileA').read_text(newline='\n'), + 'abcde\r\nfghlk\n\rmnopq') + # Check that `\r` character replaces `\n` + (p / 'fileA').write_bytes(b'abcde\r\nfghlk\n\rmnopq') + self.assertEqual((p / 'fileA').read_text(newline='\r'), + 'abcde\r\nfghlk\n\rmnopq') + # Check that `\r\n` character replaces `\n` + (p / 'fileA').write_bytes(b'abcde\r\nfghlk\n\rmnopq') + self.assertEqual((p / 'fileA').read_text(newline='\r\n'), + 'abcde\r\nfghlk\n\rmnopq') + + def test_write_text_with_newlines(self): + p = self.cls(self.base) + # Check that `\n` character change nothing + (p / 'fileA').write_text('abcde\r\nfghlk\n\rmnopq', newline='\n') + self.assertEqual((p / 'fileA').read_bytes(), + b'abcde\r\nfghlk\n\rmnopq') + # Check that `\r` character replaces `\n` + (p / 'fileA').write_text('abcde\r\nfghlk\n\rmnopq', newline='\r') + self.assertEqual((p / 'fileA').read_bytes(), + b'abcde\r\rfghlk\r\rmnopq') + # Check that `\r\n` character replaces `\n` + (p / 'fileA').write_text('abcde\r\nfghlk\n\rmnopq', newline='\r\n') + self.assertEqual((p / 'fileA').read_bytes(), + b'abcde\r\r\nfghlk\r\n\rmnopq') + # Check that no argument passed will change `\n` to `os.linesep` + os_linesep_byte = bytes(os.linesep, encoding='ascii') + (p / 'fileA').write_text('abcde\nfghlk\n\rmnopq') + self.assertEqual((p / 'fileA').read_bytes(), + b'abcde' + os_linesep_byte + b'fghlk' + os_linesep_byte + b'\rmnopq') + + def test_copy_file(self): + base = self.cls(self.base) + source = base / 'fileA' + target = base / 'copyA' + result = source.copy(target) + self.assertEqual(result, target) + self.assertTrue(target.exists()) + self.assertEqual(source.read_text(), target.read_text()) + + def test_copy_file_to_existing_file(self): + base = self.cls(self.base) + source = base / 'fileA' + target = base / 'dirB' / 'fileB' + result = source.copy(target) + self.assertEqual(result, target) + self.assertTrue(target.exists()) + self.assertEqual(source.read_text(), target.read_text()) + + def test_copy_file_to_existing_directory(self): + base = self.cls(self.base) + source = base / 'fileA' + target = base / 'dirA' + self.assertRaises(OSError, source.copy, target) + + def test_copy_file_empty(self): + base = self.cls(self.base) + source = base / 'empty' + target = base / 'copyA' + source.write_bytes(b'') + result = source.copy(target) + self.assertEqual(result, target) + self.assertTrue(target.exists()) + self.assertEqual(target.read_bytes(), b'') + + def test_copy_file_to_itself(self): + base = self.cls(self.base) + source = base / 'empty' + source.write_bytes(b'') + self.assertRaises(OSError, source.copy, source) + self.assertRaises(OSError, source.copy, source, follow_symlinks=False) + + def test_copy_dir_simple(self): + base = self.cls(self.base) + source = base / 'dirC' + target = base / 'copyC' + result = source.copy(target) + self.assertEqual(result, target) + self.assertTrue(target.is_dir()) + self.assertTrue(target.joinpath('dirD').is_dir()) + self.assertTrue(target.joinpath('dirD', 'fileD').is_file()) + self.assertEqual(target.joinpath('dirD', 'fileD').read_text(), + "this is file D\n") + self.assertTrue(target.joinpath('fileC').is_file()) + self.assertTrue(target.joinpath('fileC').read_text(), + "this is file 
C\n") + + def test_copy_dir_complex(self, follow_symlinks=True): + def ordered_walk(path): + for dirpath, dirnames, filenames in path.walk(follow_symlinks=follow_symlinks): + dirnames.sort() + filenames.sort() + yield dirpath, dirnames, filenames + base = self.cls(self.base) + source = base / 'dirC' + + if self.can_symlink: + # Add some symlinks + source.joinpath('linkC').symlink_to('fileC') + source.joinpath('linkD').symlink_to('dirD', target_is_directory=True) + + # Perform the copy + target = base / 'copyC' + result = source.copy(target, follow_symlinks=follow_symlinks) + self.assertEqual(result, target) + + # Compare the source and target trees + source_walk = ordered_walk(source) + target_walk = ordered_walk(target) + for source_item, target_item in zip(source_walk, target_walk, strict=True): + self.assertEqual(source_item[0].parts[len(source.parts):], + target_item[0].parts[len(target.parts):]) # dirpath + self.assertEqual(source_item[1], target_item[1]) # dirnames + self.assertEqual(source_item[2], target_item[2]) # filenames + # Compare files and symlinks + for filename in source_item[2]: + source_file = source_item[0].joinpath(filename) + target_file = target_item[0].joinpath(filename) + if follow_symlinks or not source_file.is_symlink(): + # Regular file. + self.assertEqual(source_file.read_bytes(), target_file.read_bytes()) + elif source_file.is_dir(): + # Symlink to directory. + self.assertTrue(target_file.is_dir()) + self.assertEqual(source_file.readlink(), target_file.readlink()) + else: + # Symlink to file. + self.assertEqual(source_file.read_bytes(), target_file.read_bytes()) + self.assertEqual(source_file.readlink(), target_file.readlink()) + + def test_copy_dir_complex_follow_symlinks_false(self): + self.test_copy_dir_complex(follow_symlinks=False) + + def test_copy_dir_to_existing_directory(self): + base = self.cls(self.base) + source = base / 'dirC' + target = base / 'copyC' + target.mkdir() + target.joinpath('dirD').mkdir() + self.assertRaises(FileExistsError, source.copy, target) + + def test_copy_dir_to_existing_directory_dirs_exist_ok(self): + base = self.cls(self.base) + source = base / 'dirC' + target = base / 'copyC' + target.mkdir() + target.joinpath('dirD').mkdir() + result = source.copy(target, dirs_exist_ok=True) + self.assertEqual(result, target) + self.assertTrue(target.is_dir()) + self.assertTrue(target.joinpath('dirD').is_dir()) + self.assertTrue(target.joinpath('dirD', 'fileD').is_file()) + self.assertEqual(target.joinpath('dirD', 'fileD').read_text(), + "this is file D\n") + self.assertTrue(target.joinpath('fileC').is_file()) + self.assertTrue(target.joinpath('fileC').read_text(), + "this is file C\n") + + def test_copy_dir_to_itself(self): + base = self.cls(self.base) + source = base / 'dirC' + self.assertRaises(OSError, source.copy, source) + self.assertRaises(OSError, source.copy, source, follow_symlinks=False) + + def test_copy_dir_into_itself(self): + base = self.cls(self.base) + source = base / 'dirC' + target = base / 'dirC' / 'dirD' / 'copyC' + self.assertRaises(OSError, source.copy, target) + self.assertRaises(OSError, source.copy, target, follow_symlinks=False) + self.assertFalse(target.exists()) + + def test_copy_into(self): + base = self.cls(self.base) + source = base / 'fileA' + target_dir = base / 'dirA' + result = source.copy_into(target_dir) + self.assertEqual(result, target_dir / 'fileA') + self.assertTrue(result.exists()) + self.assertEqual(source.read_text(), result.read_text()) + + def test_copy_into_empty_name(self): + source = 
self.cls('') + target_dir = self.base + self.assertRaises(ValueError, source.copy_into, target_dir) + + +class DummyReadablePathWalkTest(unittest.TestCase): + cls = DummyReadablePath + base = DummyReadablePathTest.base can_symlink = False def setUp(self): diff --git a/Lib/test/test_pdb.py b/Lib/test/test_pdb.py index c5ee8c5fb25350..4d371a6e754b96 100644 --- a/Lib/test/test_pdb.py +++ b/Lib/test/test_pdb.py @@ -3009,6 +3009,57 @@ def test_pdb_f_trace_lines(): (Pdb) continue """ +def test_pdb_frame_refleak(): + """ + pdb should not leak reference to frames + + >>> def frame_leaker(container): + ... import sys + ... container.append(sys._getframe()) + ... import pdb; pdb.Pdb(nosigint=True, readrc=False).set_trace() + ... pass + + >>> def test_function(): + ... import gc + ... container = [] + ... frame_leaker(container) # c + ... print(len(gc.get_referrers(container[0]))) + ... container = [] + ... frame_leaker(container) # n c + ... print(len(gc.get_referrers(container[0]))) + ... container = [] + ... frame_leaker(container) # r c + ... print(len(gc.get_referrers(container[0]))) + + >>> with PdbTestInput([ # doctest: +NORMALIZE_WHITESPACE + ... 'continue', + ... 'next', + ... 'continue', + ... 'return', + ... 'continue', + ... ]): + ... test_function() + > (4)frame_leaker() + -> import pdb; pdb.Pdb(nosigint=True, readrc=False).set_trace() + (Pdb) continue + 1 + > (4)frame_leaker() + -> import pdb; pdb.Pdb(nosigint=True, readrc=False).set_trace() + (Pdb) next + > (5)frame_leaker() + -> pass + (Pdb) continue + 1 + > (4)frame_leaker() + -> import pdb; pdb.Pdb(nosigint=True, readrc=False).set_trace() + (Pdb) return + --Return-- + > (5)frame_leaker()->None + -> pass + (Pdb) continue + 1 + """ + def test_pdb_function_break(): """Testing the line number of break on function @@ -3152,16 +3203,12 @@ def run_pdb_script(self, script, commands, self.addCleanup(os_helper.unlink, '.pdbrc') self.addCleanup(os_helper.unlink, filename) - homesave = None - if remove_home: - homesave = os.environ.pop('HOME', None) - try: + with os_helper.EnvironmentVarGuard() as env: + if remove_home: + env.unset('HOME') if script_args is None: script_args = [] stdout, stderr = self._run_pdb([filename] + script_args, commands, expected_returncode, extra_env) - finally: - if homesave is not None: - os.environ['HOME'] = homesave return stdout, stderr def run_pdb_module(self, script, commands): @@ -3585,17 +3632,14 @@ def test_readrc_kwarg(self): self.assertIn("NameError: name 'invalid' is not defined", stdout) def test_readrc_homedir(self): - save_home = os.environ.pop("HOME", None) - with os_helper.temp_dir() as temp_dir, patch("os.path.expanduser"): - rc_path = os.path.join(temp_dir, ".pdbrc") - os.path.expanduser.return_value = rc_path - try: + with os_helper.EnvironmentVarGuard() as env: + env.unset("HOME") + with os_helper.temp_dir() as temp_dir, patch("os.path.expanduser"): + rc_path = os.path.join(temp_dir, ".pdbrc") + os.path.expanduser.return_value = rc_path with open(rc_path, "w") as f: f.write("invalid") self.assertEqual(pdb.Pdb().rcLines[0], "invalid") - finally: - if save_home is not None: - os.environ["HOME"] = save_home def test_header(self): stdout = StringIO() @@ -4193,6 +4237,62 @@ def test_checkline_is_not_executable(self): self.assertFalse(db.checkline(os_helper.TESTFN, lineno)) +@support.requires_subprocess() +class PdbTestInline(unittest.TestCase): + @unittest.skipIf(sys.flags.safe_path, + 'PYTHONSAFEPATH changes default sys.path') + def _run_script(self, script, commands, + expected_returncode=0, + 
extra_env=None): + self.addCleanup(os_helper.rmtree, '__pycache__') + filename = 'main.py' + with open(filename, 'w') as f: + f.write(textwrap.dedent(script)) + self.addCleanup(os_helper.unlink, filename) + + commands = textwrap.dedent(commands) + + cmd = [sys.executable, 'main.py'] + if extra_env is not None: + env = os.environ | extra_env + else: + env = os.environ + with subprocess.Popen( + cmd, + stdout=subprocess.PIPE, + stdin=subprocess.PIPE, + stderr=subprocess.PIPE, + env = {**env, 'PYTHONIOENCODING': 'utf-8'} + ) as proc: + stdout, stderr = proc.communicate(str.encode(commands)) + stdout = bytes.decode(stdout) if isinstance(stdout, bytes) else stdout + stderr = bytes.decode(stderr) if isinstance(stderr, bytes) else stderr + self.assertEqual( + proc.returncode, + expected_returncode, + f"Unexpected return code\nstdout: {stdout}\nstderr: {stderr}" + ) + return stdout, stderr + + def test_quit(self): + script = """ + x = 1 + breakpoint() + """ + + commands = """ + quit + n + p x + 1 + quit + y + """ + + stdout, stderr = self._run_script(script, commands) + self.assertIn("2", stdout) + self.assertIn("Quit anyway", stdout) + + @support.requires_subprocess() class PdbTestReadline(unittest.TestCase): def setUpClass(): diff --git a/Lib/test/test_peepholer.py b/Lib/test/test_peepholer.py index c7da151dce3b37..b5b2b350e77a3b 100644 --- a/Lib/test/test_peepholer.py +++ b/Lib/test/test_peepholer.py @@ -1193,5 +1193,56 @@ def get_insts(lno1, lno2, op1, op2): ] self.cfg_optimization_test(insts, expected_insts, consts=list(range(5))) + def test_list_to_tuple_get_iter(self): + # for _ in (*foo, *bar) -> for _ in [*foo, *bar] + INTRINSIC_LIST_TO_TUPLE = 6 + insts = [ + ("BUILD_LIST", 0, 1), + ("LOAD_FAST", 0, 2), + ("LIST_EXTEND", 1, 3), + ("LOAD_FAST", 1, 4), + ("LIST_EXTEND", 1, 5), + ("CALL_INTRINSIC_1", INTRINSIC_LIST_TO_TUPLE, 6), + ("GET_ITER", None, 7), + top := self.Label(), + ("FOR_ITER", end := self.Label(), 8), + ("STORE_FAST", 2, 9), + ("JUMP", top, 10), + end, + ("END_FOR", None, 11), + ("POP_TOP", None, 12), + ("LOAD_CONST", 0, 13), + ("RETURN_VALUE", None, 14), + ] + expected_insts = [ + ("BUILD_LIST", 0, 1), + ("LOAD_FAST", 0, 2), + ("LIST_EXTEND", 1, 3), + ("LOAD_FAST", 1, 4), + ("LIST_EXTEND", 1, 5), + ("NOP", None, 6), # ("CALL_INTRINSIC_1", INTRINSIC_LIST_TO_TUPLE, 6), + ("GET_ITER", None, 7), + top := self.Label(), + ("FOR_ITER", end := self.Label(), 8), + ("STORE_FAST", 2, 9), + ("JUMP", top, 10), + end, + ("END_FOR", None, 11), + ("POP_TOP", None, 12), + ("LOAD_CONST", 0, 13), + ("RETURN_VALUE", None, 14), + ] + self.cfg_optimization_test(insts, expected_insts, consts=[None]) + + def test_list_to_tuple_get_iter_is_safe(self): + a, b = [], [] + for item in (*(items := [0, 1, 2, 3]),): + a.append(item) + b.append(items.pop()) + self.assertEqual(a, [0, 1, 2, 3]) + self.assertEqual(b, [3, 2, 1, 0]) + self.assertEqual(items, []) + + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_perf_profiler.py b/Lib/test/test_perf_profiler.py index 1e74990878007a..6f1fd8d38e4ea0 100644 --- a/Lib/test/test_perf_profiler.py +++ b/Lib/test/test_perf_profiler.py @@ -47,6 +47,7 @@ def tearDown(self) -> None: for file in files_to_delete: file.unlink() + @unittest.skipIf(support.check_bolt_optimized, "fails on BOLT instrumented binaries") def test_trampoline_works(self): code = """if 1: def foo(): @@ -100,6 +101,7 @@ def baz(): "Address should contain only hex characters", ) + @unittest.skipIf(support.check_bolt_optimized, "fails on BOLT instrumented binaries") def 
test_trampoline_works_with_forks(self): code = """if 1: import os, sys @@ -160,6 +162,7 @@ def baz(): self.assertIn(f"py::bar_fork:{script}", child_perf_file_contents) self.assertIn(f"py::baz_fork:{script}", child_perf_file_contents) + @unittest.skipIf(support.check_bolt_optimized, "fails on BOLT instrumented binaries") def test_sys_api(self): code = """if 1: import sys diff --git a/Lib/test/test_pyclbr.py b/Lib/test/test_pyclbr.py index 4bf0576586cca5..25b313f6c25a4e 100644 --- a/Lib/test/test_pyclbr.py +++ b/Lib/test/test_pyclbr.py @@ -31,14 +31,6 @@ def assertListEq(self, l1, l2, ignore): print("l1=%r\nl2=%r\nignore=%r" % (l1, l2, ignore), file=sys.stderr) self.fail("%r missing" % missing.pop()) - def assertHasattr(self, obj, attr, ignore): - ''' succeed iff hasattr(obj,attr) or attr in ignore. ''' - if attr in ignore: return - if not hasattr(obj, attr): print("???", attr) - self.assertTrue(hasattr(obj, attr), - 'expected hasattr(%r, %r)' % (obj, attr)) - - def assertHaskey(self, obj, key, ignore): ''' succeed iff key in obj or key in ignore. ''' if key in ignore: return @@ -86,7 +78,7 @@ def ismethod(oclass, obj, name): for name, value in dict.items(): if name in ignore: continue - self.assertHasattr(module, name, ignore) + self.assertHasAttr(module, name, ignore) py_item = getattr(module, name) if isinstance(value, pyclbr.Function): self.assertIsInstance(py_item, (FunctionType, BuiltinFunctionType)) diff --git a/Lib/test/test_pydoc/test_pydoc.py b/Lib/test/test_pydoc/test_pydoc.py index c798b11f5aa56e..b02ba3aafd4d20 100644 --- a/Lib/test/test_pydoc/test_pydoc.py +++ b/Lib/test/test_pydoc/test_pydoc.py @@ -4,6 +4,7 @@ import contextlib import importlib.util import inspect +import io import pydoc import py_compile import keyword @@ -555,6 +556,14 @@ class object | ... and 82 other subclasses """ doc = pydoc.TextDoc() + try: + # Make sure HeapType, which has no __module__ attribute, is one + # of the known subclasses of object. (doc.docclass() used to + # fail if HeapType was imported before running this test, like + # when running tests sequentially.) + from _testcapi import HeapType + except ImportError: + pass text = doc.docclass(object) snip = (" | Built-in subclasses:\n" " | async_generator\n" @@ -899,6 +908,82 @@ def test_synopsis(self): synopsis = pydoc.synopsis(TESTFN, {}) self.assertEqual(synopsis, 'line 1: h\xe9') + def test_source_synopsis(self): + def check(source, expected, encoding=None): + if isinstance(source, str): + source_file = StringIO(source) + else: + source_file = io.TextIOWrapper(io.BytesIO(source), encoding=encoding) + with source_file: + result = pydoc.source_synopsis(source_file) + self.assertEqual(result, expected) + + check('"""Single line docstring."""', + 'Single line docstring.') + check('"""First line of docstring.\nSecond line.\nThird line."""', + 'First line of docstring.') + check('"""First line of docstring.\\nSecond line.\\nThird line."""', + 'First line of docstring.') + check('""" Whitespace around docstring. 
"""', + 'Whitespace around docstring.') + check('import sys\n"""No docstring"""', + None) + check(' \n"""Docstring after empty line."""', + 'Docstring after empty line.') + check('# Comment\n"""Docstring after comment."""', + 'Docstring after comment.') + check(' # Indented comment\n"""Docstring after comment."""', + 'Docstring after comment.') + check('""""""', # Empty docstring + '') + check('', # Empty file + None) + check('"""Embedded\0null byte"""', + None) + check('"""Embedded null byte"""\0', + None) + check('"""Café and résumé."""', + 'Café and résumé.') + check("'''Triple single quotes'''", + 'Triple single quotes') + check('"Single double quotes"', + 'Single double quotes') + check("'Single single quotes'", + 'Single single quotes') + check('"""split\\\nline"""', + 'splitline') + check('"""Unrecognized escape \\sequence"""', + 'Unrecognized escape \\sequence') + check('"""Invalid escape seq\\uence"""', + None) + check('r"""Raw \\stri\\ng"""', + 'Raw \\stri\\ng') + check('b"""Bytes literal"""', + None) + check('f"""f-string"""', + None) + check('"""Concatenated""" \\\n"string" \'literals\'', + 'Concatenatedstringliterals') + check('"""String""" + """expression"""', + None) + check('("""In parentheses""")', + 'In parentheses') + check('("""Multiple lines """\n"""in parentheses""")', + 'Multiple lines in parentheses') + check('()', # tuple + None) + check(b'# coding: iso-8859-15\n"""\xa4uro sign"""', + '€uro sign', encoding='iso-8859-15') + check(b'"""\xa4"""', # Decoding error + None, encoding='utf-8') + + with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8') as temp_file: + temp_file.write('"""Real file test."""\n') + temp_file.flush() + temp_file.seek(0) + result = pydoc.source_synopsis(temp_file) + self.assertEqual(result, "Real file test.") + @requires_docstrings def test_synopsis_sourceless(self): os = import_helper.import_fresh_module('os') diff --git a/Lib/test/test_pyrepl/support.py b/Lib/test/test_pyrepl/support.py index 672d4896c92283..45e3bf758f17de 100644 --- a/Lib/test/test_pyrepl/support.py +++ b/Lib/test/test_pyrepl/support.py @@ -101,16 +101,6 @@ def handle_all_events( ) -def make_clean_env() -> dict[str, str]: - clean_env = os.environ.copy() - for k in clean_env.copy(): - if k.startswith("PYTHON"): - clean_env.pop(k) - clean_env.pop("FORCE_COLOR", None) - clean_env.pop("NO_COLOR", None) - return clean_env - - class FakeConsole(Console): def __init__(self, events, encoding="utf-8") -> None: self.events = iter(events) diff --git a/Lib/test/test_pyrepl/test_pyrepl.py b/Lib/test/test_pyrepl/test_pyrepl.py index f29a7ffbd7cafd..3540d2a5a41662 100644 --- a/Lib/test/test_pyrepl/test_pyrepl.py +++ b/Lib/test/test_pyrepl/test_pyrepl.py @@ -10,7 +10,7 @@ import tempfile from unittest import TestCase, skipUnless, skipIf from unittest.mock import patch -from test.support import force_not_colorized +from test.support import force_not_colorized, make_clean_env from test.support import SHORT_TIMEOUT from test.support.import_helper import import_module from test.support.os_helper import unlink @@ -23,7 +23,6 @@ multiline_input, code_to_events, clean_screen, - make_clean_env, ) from _pyrepl.console import Event from _pyrepl.readline import (ReadlineAlikeReader, ReadlineConfig, @@ -851,7 +850,7 @@ def test_global_namespace_completion(self): output = multiline_input(reader, namespace) self.assertEqual(output, "python") - def test_updown_arrow_with_completion_menu(self): + def test_up_down_arrow_with_completion_menu(self): """Up arrow in the middle of unfinished tab completion 
when the menu is displayed should work and trigger going back in history. Down arrow should subsequently get us back to the incomplete command.""" @@ -861,6 +860,7 @@ def test_updown_arrow_with_completion_menu(self): events = itertools.chain( code_to_events(code), [ + Event(evt="key", data="down", raw=bytearray(b"\x1bOB")), Event(evt="key", data="up", raw=bytearray(b"\x1bOA")), Event(evt="key", data="down", raw=bytearray(b"\x1bOB")), ], @@ -1324,23 +1324,35 @@ def test_readline_history_file(self): if readline.backend != "editline": self.skipTest("GNU readline is not affected by this issue") - hfile = tempfile.NamedTemporaryFile() - self.addCleanup(unlink, hfile.name) - env = os.environ.copy() - env["PYTHON_HISTORY"] = hfile.name + with tempfile.NamedTemporaryFile() as hfile: + env = os.environ.copy() + env["PYTHON_HISTORY"] = hfile.name - env["PYTHON_BASIC_REPL"] = "1" - output, exit_code = self.run_repl("spam \nexit()\n", env=env) - self.assertEqual(exit_code, 0) - self.assertIn("spam ", output) - self.assertNotEqual(pathlib.Path(hfile.name).stat().st_size, 0) - self.assertIn("spam\\040", pathlib.Path(hfile.name).read_text()) + env["PYTHON_BASIC_REPL"] = "1" + output, exit_code = self.run_repl("spam \nexit()\n", env=env) + self.assertEqual(exit_code, 0) + self.assertIn("spam ", output) + self.assertNotEqual(pathlib.Path(hfile.name).stat().st_size, 0) + self.assertIn("spam\\040", pathlib.Path(hfile.name).read_text()) - env.pop("PYTHON_BASIC_REPL", None) - output, exit_code = self.run_repl("exit\n", env=env) - self.assertEqual(exit_code, 0) - self.assertNotIn("\\040", pathlib.Path(hfile.name).read_text()) + env.pop("PYTHON_BASIC_REPL", None) + output, exit_code = self.run_repl("exit\n", env=env) + self.assertEqual(exit_code, 0) + self.assertNotIn("\\040", pathlib.Path(hfile.name).read_text()) def test_keyboard_interrupt_after_isearch(self): output, exit_code = self.run_repl(["\x12", "\x03", "exit"]) self.assertEqual(exit_code, 0) + + def test_prompt_after_help(self): + output, exit_code = self.run_repl(["help", "q", "exit"]) + + # Regex pattern to remove ANSI escape sequences + ansi_escape = re.compile(r"(\x1B(=|>|(\[)[0-?]*[ -\/]*[@-~]))") + cleaned_output = ansi_escape.sub("", output) + self.assertEqual(exit_code, 0) + + # Ensure that we don't see multiple prompts after exiting `help` + # Extra stuff (newline and `exit` rewrites) are necessary + # because of how run_repl works. 
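The new prompt test strips terminal control sequences before comparing REPL output. The same regex-based clean-up in isolation, as a short sketch:

import re

# Matches ESC followed by '=', '>' or a CSI sequence, as in the test above.
ansi_escape = re.compile(r"(\x1B(=|>|(\[)[0-?]*[ -\/]*[@-~]))")

raw = "\x1b[1;35m>>> \x1b[0mexit\x1b>"
print(ansi_escape.sub("", raw))   # '>>> exit'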
+ self.assertNotIn(">>> \n>>> >>>", cleaned_output) diff --git a/Lib/test/test_pyrepl/test_reader.py b/Lib/test/test_pyrepl/test_reader.py index 6c72a1d39c55df..27c6d6664eda9e 100644 --- a/Lib/test/test_pyrepl/test_reader.py +++ b/Lib/test/test_pyrepl/test_reader.py @@ -4,7 +4,7 @@ from unittest import TestCase from unittest.mock import MagicMock -from .support import handle_all_events, handle_events_narrow_console, code_to_events, prepare_reader +from .support import handle_all_events, handle_events_narrow_console, code_to_events, prepare_reader, prepare_console from _pyrepl.console import Event from _pyrepl.reader import Reader @@ -295,8 +295,8 @@ def test_completions_updated_on_key_press(self): actual = reader.screen self.assertEqual(len(actual), 2) - self.assertEqual(actual[0].rstrip(), "itertools.accumulate(") - self.assertEqual(actual[1], f"{code}a") + self.assertEqual(actual[0], f"{code}a") + self.assertEqual(actual[1].rstrip(), "itertools.accumulate(") def test_key_press_on_tab_press_once(self): namespace = {"itertools": itertools} @@ -312,3 +312,10 @@ def test_key_press_on_tab_press_once(self): reader, _ = handle_all_events(events, prepare_reader=completing_reader) self.assert_screen_equals(reader, f"{code}a") + + def test_pos2xy_with_no_columns(self): + console = prepare_console([]) + reader = prepare_reader(console) + # Simulate a resize to 0 columns + reader.screeninfo = [] + self.assertEqual(reader.pos2xy(), (0, 0)) diff --git a/Lib/test/test_pyrepl/test_unix_console.py b/Lib/test/test_pyrepl/test_unix_console.py index e3bbabcb0089fb..15dbf48bcf0f1c 100644 --- a/Lib/test/test_pyrepl/test_unix_console.py +++ b/Lib/test/test_pyrepl/test_unix_console.py @@ -1,7 +1,9 @@ import itertools +import os import sys import unittest from functools import partial +from test.support import os_helper from unittest import TestCase from unittest.mock import MagicMock, call, patch, ANY @@ -312,3 +314,14 @@ def same_console(events): ) console.restore() con.restore() + + def test_getheightwidth_with_invalid_environ(self, _os_write): + # gh-128636 + console = UnixConsole() + with os_helper.EnvironmentVarGuard() as env: + env["LINES"] = "" + self.assertIsInstance(console.getheightwidth(), tuple) + env["COLUMNS"] = "" + self.assertIsInstance(console.getheightwidth(), tuple) + os.environ = [] + self.assertIsInstance(console.getheightwidth(), tuple) diff --git a/Lib/test/test_pyrepl/test_windows_console.py b/Lib/test/test_pyrepl/test_windows_console.py index 4a3b2baf64a944..07eaccd1124cd6 100644 --- a/Lib/test/test_pyrepl/test_windows_console.py +++ b/Lib/test/test_pyrepl/test_windows_console.py @@ -329,6 +329,20 @@ def move_right(self, cols=1): def erase_in_line(self): return ERASE_IN_LINE.encode("utf8") + def test_multiline_ctrl_z(self): + # see gh-126332 + code = "abcdefghi" + + events = itertools.chain( + code_to_events(code), + [ + Event(evt="key", data='\x1a', raw=bytearray(b'\x1a')), + Event(evt="key", data='\x1a', raw=bytearray(b'\x1a')), + ], + ) + reader, _ = self.handle_events_narrow(events) + self.assertEqual(reader.cxy, (2, 3)) + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_regrtest.py b/Lib/test/test_regrtest.py index ab46ccbf004a3a..969f483814d08d 100644 --- a/Lib/test/test_regrtest.py +++ b/Lib/test/test_regrtest.py @@ -792,6 +792,7 @@ def test_finds_expected_number_of_tests(self): f'{", ".join(output.splitlines())}') +@support.force_not_colorized_test_class class ProgramsTestCase(BaseTestCase): """ Test various ways to run the Python test suite. 
Use options close @@ -905,6 +906,7 @@ def test_pcbuild_rt(self): self.run_batch(script, *rt_args, *self.regrtest_args, *self.tests) +@support.force_not_colorized_test_class class ArgsTestCase(BaseTestCase): """ Test arguments of the Python test suite. @@ -1183,7 +1185,7 @@ def test_run(self): stats=TestStats(4, 1), forever=True) - @support.without_optimizer + @support.requires_jit_disabled def check_leak(self, code, what, *, run_workers=False): test = self.create_test('huntrleaks', code=code) @@ -2145,25 +2147,25 @@ def check_add_python_opts(self, option): import unittest from test import support try: - from _testinternalcapi import get_config + from _testcapi import config_get except ImportError: - get_config = None + config_get = None # WASI/WASM buildbots don't use -E option use_environment = (support.is_emscripten or support.is_wasi) class WorkerTests(unittest.TestCase): - @unittest.skipUnless(get_config is None, 'need get_config()') + @unittest.skipUnless(config_get is None, 'need config_get()') def test_config(self): - config = get_config()['config'] + config = config_get() # -u option - self.assertEqual(config['buffered_stdio'], 0) + self.assertEqual(config_get('buffered_stdio'), 0) # -W default option - self.assertTrue(config['warnoptions'], ['default']) + self.assertTrue(config_get('warnoptions'), ['default']) # -bb option - self.assertTrue(config['bytes_warning'], 2) + self.assertTrue(config_get('bytes_warning'), 2) # -E option - self.assertTrue(config['use_environment'], use_environment) + self.assertTrue(config_get('use_environment'), use_environment) def test_python_opts(self): # -u option diff --git a/Lib/test/test_repl.py b/Lib/test/test_repl.py index e764e60560db23..356ff5b198d637 100644 --- a/Lib/test/test_repl.py +++ b/Lib/test/test_repl.py @@ -70,6 +70,7 @@ def run_on_interactive_mode(source): return output +@support.force_not_colorized_test_class class TestInteractiveInterpreter(unittest.TestCase): @cpython_only @@ -273,6 +274,8 @@ def test_asyncio_repl_is_ok(self): self.assertEqual(exit_code, 0, "".join(output)) + +@support.force_not_colorized_test_class class TestInteractiveModeSyntaxErrors(unittest.TestCase): def test_interactive_syntax_error_correct_line(self): diff --git a/Lib/test/test_runpy.py b/Lib/test/test_runpy.py index b64383f6546f31..ada78ec8e6b0c7 100644 --- a/Lib/test/test_runpy.py +++ b/Lib/test/test_runpy.py @@ -12,8 +12,14 @@ import textwrap import unittest import warnings -from test.support import (infinite_recursion, no_tracing, verbose, - requires_subprocess, requires_resource) +from test.support import ( + force_not_colorized_test_class, + infinite_recursion, + no_tracing, + requires_resource, + requires_subprocess, + verbose, +) from test.support.import_helper import forget, make_legacy_pyc, unload from test.support.os_helper import create_empty_file, temp_dir, FakePath from test.support.script_helper import make_script, make_zip_script @@ -758,6 +764,7 @@ def test_encoding(self): self.assertEqual(result['s'], "non-ASCII: h\xe9") +@force_not_colorized_test_class class TestExit(unittest.TestCase): STATUS_CONTROL_C_EXIT = 0xC000013A EXPECTED_CODE = ( diff --git a/Lib/test/test_shutil.py b/Lib/test/test_shutil.py index 1f18b1f09b5858..078ddd6c431b37 100644 --- a/Lib/test/test_shutil.py +++ b/Lib/test/test_shutil.py @@ -3239,12 +3239,8 @@ def test_filesystem_full(self): self.assertRaises(OSError, self.zerocopy_fun, src, dst) -@unittest.skipIf(not SUPPORTS_SENDFILE, 'os.sendfile() not supported') -class TestZeroCopySendfile(_ZeroCopyFileTest, 
unittest.TestCase): - PATCHPOINT = "os.sendfile" - - def zerocopy_fun(self, fsrc, fdst): - return shutil._fastcopy_sendfile(fsrc, fdst) +class _ZeroCopyFileLinuxTest(_ZeroCopyFileTest): + BLOCKSIZE_INDEX = None def test_non_regular_file_src(self): with io.BytesIO(self.FILEDATA) as src: @@ -3265,65 +3261,65 @@ def test_non_regular_file_dst(self): self.assertEqual(dst.read(), self.FILEDATA) def test_exception_on_second_call(self): - def sendfile(*args, **kwargs): + def syscall(*args, **kwargs): if not flag: flag.append(None) - return orig_sendfile(*args, **kwargs) + return orig_syscall(*args, **kwargs) else: raise OSError(errno.EBADF, "yo") flag = [] - orig_sendfile = os.sendfile - with unittest.mock.patch('os.sendfile', create=True, - side_effect=sendfile): + orig_syscall = eval(self.PATCHPOINT) + with unittest.mock.patch(self.PATCHPOINT, create=True, + side_effect=syscall): with self.get_files() as (src, dst): with self.assertRaises(OSError) as cm: - shutil._fastcopy_sendfile(src, dst) + self.zerocopy_fun(src, dst) assert flag self.assertEqual(cm.exception.errno, errno.EBADF) def test_cant_get_size(self): # Emulate a case where src file size cannot be determined. # Internally bufsize will be set to a small value and - # sendfile() will be called repeatedly. + # a system call will be called repeatedly. with unittest.mock.patch('os.fstat', side_effect=OSError) as m: with self.get_files() as (src, dst): - shutil._fastcopy_sendfile(src, dst) + self.zerocopy_fun(src, dst) assert m.called self.assertEqual(read_file(TESTFN2, binary=True), self.FILEDATA) def test_small_chunks(self): # Force internal file size detection to be smaller than the - # actual file size. We want to force sendfile() to be called + # actual file size. We want to force a system call to be called # multiple times, also in order to emulate a src fd which gets # bigger while it is being copied. mock = unittest.mock.Mock() mock.st_size = 65536 + 1 with unittest.mock.patch('os.fstat', return_value=mock) as m: with self.get_files() as (src, dst): - shutil._fastcopy_sendfile(src, dst) + self.zerocopy_fun(src, dst) assert m.called self.assertEqual(read_file(TESTFN2, binary=True), self.FILEDATA) def test_big_chunk(self): # Force internal file size detection to be +100MB bigger than - # the actual file size. Make sure sendfile() does not rely on + # the actual file size. Make sure a system call does not rely on # file size value except for (maybe) a better throughput / # performance. mock = unittest.mock.Mock() mock.st_size = self.FILESIZE + (100 * 1024 * 1024) with unittest.mock.patch('os.fstat', return_value=mock) as m: with self.get_files() as (src, dst): - shutil._fastcopy_sendfile(src, dst) + self.zerocopy_fun(src, dst) assert m.called self.assertEqual(read_file(TESTFN2, binary=True), self.FILEDATA) def test_blocksize_arg(self): - with unittest.mock.patch('os.sendfile', + with unittest.mock.patch(self.PATCHPOINT, side_effect=ZeroDivisionError) as m: self.assertRaises(ZeroDivisionError, shutil.copyfile, TESTFN, TESTFN2) - blocksize = m.call_args[0][3] + blocksize = m.call_args[0][self.BLOCKSIZE_INDEX] # Make sure file size and the block size arg passed to # sendfile() are the same. 
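These zero-copy tests are being generalised so one battery covers both the os.sendfile() and os.copy_file_range() fast paths; the index of the byte-count argument differs between the two system calls, hence the BLOCKSIZE_INDEX hook. A bare-bones sketch of such a copy loop (Linux-only, and without the fallback and error translation shutil's real helpers need):

import os

def copy_with_copy_file_range(fsrc, fdst, blocksize=2 ** 20):
    # Kernel-side copy from fsrc to fdst; stops when copy_file_range() reports EOF.
    infd, outfd = fsrc.fileno(), fdst.fileno()
    while True:
        copied = os.copy_file_range(infd, outfd, blocksize)
        if copied == 0:
            break

with open("src.bin", "rb") as fsrc, open("dst.bin", "wb") as fdst:  # hypothetical files
    copy_with_copy_file_range(fsrc, fdst)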
self.assertEqual(blocksize, os.path.getsize(TESTFN)) @@ -3333,9 +3329,19 @@ def test_blocksize_arg(self): self.addCleanup(os_helper.unlink, TESTFN2 + '3') self.assertRaises(ZeroDivisionError, shutil.copyfile, TESTFN2, TESTFN2 + '3') - blocksize = m.call_args[0][3] + blocksize = m.call_args[0][self.BLOCKSIZE_INDEX] self.assertEqual(blocksize, 2 ** 23) + +@unittest.skipIf(not SUPPORTS_SENDFILE, 'os.sendfile() not supported') +@unittest.mock.patch.object(shutil, "_USE_CP_COPY_FILE_RANGE", False) +class TestZeroCopySendfile(_ZeroCopyFileLinuxTest, unittest.TestCase): + PATCHPOINT = "os.sendfile" + BLOCKSIZE_INDEX = 3 + + def zerocopy_fun(self, fsrc, fdst): + return shutil._fastcopy_sendfile(fsrc, fdst) + def test_file2file_not_supported(self): # Emulate a case where sendfile() only support file->socket # fds. In such a case copyfile() is supposed to skip the @@ -3358,6 +3364,29 @@ def test_file2file_not_supported(self): shutil._USE_CP_SENDFILE = True +@unittest.skipUnless(shutil._USE_CP_COPY_FILE_RANGE, "os.copy_file_range() not supported") +class TestZeroCopyCopyFileRange(_ZeroCopyFileLinuxTest, unittest.TestCase): + PATCHPOINT = "os.copy_file_range" + BLOCKSIZE_INDEX = 2 + + def zerocopy_fun(self, fsrc, fdst): + return shutil._fastcopy_copy_file_range(fsrc, fdst) + + def test_empty_file(self): + srcname = f"{TESTFN}src" + dstname = f"{TESTFN}dst" + self.addCleanup(lambda: os_helper.unlink(srcname)) + self.addCleanup(lambda: os_helper.unlink(dstname)) + with open(srcname, "wb"): + pass + + with open(srcname, "rb") as src, open(dstname, "wb") as dst: + # _fastcopy_copy_file_range gives up copying empty files due + # to a bug in older Linux. + with self.assertRaises(shutil._GiveupOnFastCopy): + self.zerocopy_fun(src, dst) + + @unittest.skipIf(not MACOS, 'macOS only') class TestZeroCopyMACOS(_ZeroCopyFileTest, unittest.TestCase): PATCHPOINT = "posix._fcopyfile" diff --git a/Lib/test/test_signal.py b/Lib/test/test_signal.py index 704a0090bdbc0f..72a01cd1e451f4 100644 --- a/Lib/test/test_signal.py +++ b/Lib/test/test_signal.py @@ -253,9 +253,7 @@ def test_invalid_socket(self): self.assertRaises((ValueError, OSError), signal.set_wakeup_fd, fd) - # Emscripten does not support fstat on pipes yet. - # https://github.com/emscripten-core/emscripten/issues/16414 - @unittest.skipIf(support.is_emscripten, "Emscripten cannot fstat pipes.") + @unittest.skipIf(support.is_emscripten, "Fixed in next Emscripten release after 4.0.1") @unittest.skipUnless(hasattr(os, "pipe"), "requires os.pipe()") def test_set_wakeup_fd_result(self): r1, w1 = os.pipe() @@ -274,7 +272,7 @@ def test_set_wakeup_fd_result(self): self.assertEqual(signal.set_wakeup_fd(-1), w2) self.assertEqual(signal.set_wakeup_fd(-1), -1) - @unittest.skipIf(support.is_emscripten, "Emscripten cannot fstat pipes.") + @unittest.skipIf(support.is_emscripten, "Fixed in next Emscripten release after 4.0.1") @unittest.skipUnless(support.has_socket_support, "needs working sockets.") def test_set_wakeup_fd_socket_result(self): sock1 = socket.socket() @@ -295,7 +293,7 @@ def test_set_wakeup_fd_socket_result(self): # On Windows, files are always blocking and Windows does not provide a # function to test if a socket is in non-blocking mode. 
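For context on the wakeup-fd machinery whose "Exception ignored while ..." message these tests now expect: once a Python-level handler is installed, the C signal handler writes the signal number as a single byte to the registered non-blocking fd. A POSIX-only sketch:

import os
import signal

signal.signal(signal.SIGUSR1, lambda signum, frame: None)  # a Python handler must exist
rfd, wfd = os.pipe()
os.set_blocking(wfd, False)        # the wakeup fd has to be non-blocking
old_fd = signal.set_wakeup_fd(wfd)
try:
    signal.raise_signal(signal.SIGUSR1)
    print(os.read(rfd, 16))        # one byte carrying the signal number
finally:
    signal.set_wakeup_fd(old_fd)
    os.close(rfd)
    os.close(wfd)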
@unittest.skipIf(sys.platform == "win32", "tests specific to POSIX") - @unittest.skipIf(support.is_emscripten, "Emscripten cannot fstat pipes.") + @unittest.skipIf(support.is_emscripten, "Fixed in next Emscripten release after 4.0.1") @unittest.skipUnless(hasattr(os, "pipe"), "requires os.pipe()") def test_set_wakeup_fd_blocking(self): rfd, wfd = os.pipe() @@ -385,7 +383,7 @@ def handler(signum, frame): except ZeroDivisionError: # An ignored exception should have been printed out on stderr err = err.getvalue() - if ('Exception ignored when trying to write to the signal wakeup fd' + if ('Exception ignored while trying to write to the signal wakeup fd' not in err): raise AssertionError(err) if ('OSError: [Errno %d]' % errno.EBADF) not in err: @@ -574,7 +572,7 @@ def handler(signum, frame): signal.raise_signal(signum) err = err.getvalue() - if ('Exception ignored when trying to {action} to the signal wakeup fd' + if ('Exception ignored while trying to {action} to the signal wakeup fd' not in err): raise AssertionError(err) """.format(action=action) @@ -644,7 +642,7 @@ def handler(signum, frame): "buffer" % written) # By default, we get a warning when a signal arrives - msg = ('Exception ignored when trying to {action} ' + msg = ('Exception ignored while trying to {action} ' 'to the signal wakeup fd') signal.set_wakeup_fd(write.fileno()) diff --git a/Lib/test/test_socket.py b/Lib/test/test_socket.py index faf326d9164e1b..b77fa3cb21512a 100644 --- a/Lib/test/test_socket.py +++ b/Lib/test/test_socket.py @@ -520,6 +520,8 @@ def clientTearDown(self): @unittest.skipIf(WSL, 'VSOCK does not work on Microsoft WSL') @unittest.skipUnless(HAVE_SOCKET_VSOCK, 'VSOCK sockets required for this test.') +@unittest.skipUnless(get_cid() != 2, # VMADDR_CID_HOST + "This test can only be run on a virtual guest.") class ThreadedVSOCKSocketStreamTest(unittest.TestCase, ThreadableTest): def __init__(self, methodName='runTest'): diff --git a/Lib/test/test_sqlite3/test_cli.py b/Lib/test/test_sqlite3/test_cli.py index d014a9ce841607..dcd90d11d46819 100644 --- a/Lib/test/test_sqlite3/test_cli.py +++ b/Lib/test/test_sqlite3/test_cli.py @@ -90,14 +90,14 @@ def test_interact(self): out, err = self.run_cli() self.assertIn(self.MEMORY_DB_MSG, err) self.assertIn(self.MEMORY_DB_MSG, err) - self.assertTrue(out.endswith(self.PS1)) + self.assertEndsWith(out, self.PS1) self.assertEqual(out.count(self.PS1), 1) self.assertEqual(out.count(self.PS2), 0) def test_interact_quit(self): out, err = self.run_cli(commands=(".quit",)) self.assertIn(self.MEMORY_DB_MSG, err) - self.assertTrue(out.endswith(self.PS1)) + self.assertEndsWith(out, self.PS1) self.assertEqual(out.count(self.PS1), 1) self.assertEqual(out.count(self.PS2), 0) @@ -105,7 +105,7 @@ def test_interact_version(self): out, err = self.run_cli(commands=(".version",)) self.assertIn(self.MEMORY_DB_MSG, err) self.assertIn(sqlite3.sqlite_version + "\n", out) - self.assertTrue(out.endswith(self.PS1)) + self.assertEndsWith(out, self.PS1) self.assertEqual(out.count(self.PS1), 2) self.assertEqual(out.count(self.PS2), 0) self.assertIn(sqlite3.sqlite_version, out) @@ -114,14 +114,14 @@ def test_interact_valid_sql(self): out, err = self.run_cli(commands=("SELECT 1;",)) self.assertIn(self.MEMORY_DB_MSG, err) self.assertIn("(1,)\n", out) - self.assertTrue(out.endswith(self.PS1)) + self.assertEndsWith(out, self.PS1) self.assertEqual(out.count(self.PS1), 2) self.assertEqual(out.count(self.PS2), 0) def test_interact_incomplete_multiline_sql(self): out, err = self.run_cli(commands=("SELECT 1",)) 
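The sqlite3 CLI tests above switch to the newer single-call assertion helpers. A tiny sketch of how they read, assuming a CPython development build that already provides assertStartsWith and assertEndsWith:

import unittest

class PromptAssertions(unittest.TestCase):
    def test_prompt_shape(self):
        out = "sqlite> "
        self.assertStartsWith(out, "sqlite")
        self.assertEndsWith(out, "> ")

if __name__ == "__main__":
    unittest.main()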
self.assertIn(self.MEMORY_DB_MSG, err) - self.assertTrue(out.endswith(self.PS2)) + self.assertEndsWith(out, self.PS2) self.assertEqual(out.count(self.PS1), 1) self.assertEqual(out.count(self.PS2), 1) @@ -130,7 +130,7 @@ def test_interact_valid_multiline_sql(self): self.assertIn(self.MEMORY_DB_MSG, err) self.assertIn(self.PS2, out) self.assertIn("(1,)\n", out) - self.assertTrue(out.endswith(self.PS1)) + self.assertEndsWith(out, self.PS1) self.assertEqual(out.count(self.PS1), 2) self.assertEqual(out.count(self.PS2), 1) @@ -138,7 +138,7 @@ def test_interact_invalid_sql(self): out, err = self.run_cli(commands=("sel;",)) self.assertIn(self.MEMORY_DB_MSG, err) self.assertIn("OperationalError (SQLITE_ERROR)", err) - self.assertTrue(out.endswith(self.PS1)) + self.assertEndsWith(out, self.PS1) self.assertEqual(out.count(self.PS1), 2) self.assertEqual(out.count(self.PS2), 0) @@ -147,7 +147,7 @@ def test_interact_on_disk_file(self): out, err = self.run_cli(TESTFN, commands=("CREATE TABLE t(t);",)) self.assertIn(TESTFN, err) - self.assertTrue(out.endswith(self.PS1)) + self.assertEndsWith(out, self.PS1) out, _ = self.run_cli(TESTFN, commands=("SELECT count(t) FROM t;",)) self.assertIn("(0,)\n", out) diff --git a/Lib/test/test_sqlite3/test_dbapi.py b/Lib/test/test_sqlite3/test_dbapi.py index 488b401fb0054d..f5ffe2427430e2 100644 --- a/Lib/test/test_sqlite3/test_dbapi.py +++ b/Lib/test/test_sqlite3/test_dbapi.py @@ -59,45 +59,34 @@ def test_param_style(self): sqlite.paramstyle) def test_warning(self): - self.assertTrue(issubclass(sqlite.Warning, Exception), - "Warning is not a subclass of Exception") + self.assertIsSubclass(sqlite.Warning, Exception) def test_error(self): - self.assertTrue(issubclass(sqlite.Error, Exception), - "Error is not a subclass of Exception") + self.assertIsSubclass(sqlite.Error, Exception) def test_interface_error(self): - self.assertTrue(issubclass(sqlite.InterfaceError, sqlite.Error), - "InterfaceError is not a subclass of Error") + self.assertIsSubclass(sqlite.InterfaceError, sqlite.Error) def test_database_error(self): - self.assertTrue(issubclass(sqlite.DatabaseError, sqlite.Error), - "DatabaseError is not a subclass of Error") + self.assertIsSubclass(sqlite.DatabaseError, sqlite.Error) def test_data_error(self): - self.assertTrue(issubclass(sqlite.DataError, sqlite.DatabaseError), - "DataError is not a subclass of DatabaseError") + self.assertIsSubclass(sqlite.DataError, sqlite.DatabaseError) def test_operational_error(self): - self.assertTrue(issubclass(sqlite.OperationalError, sqlite.DatabaseError), - "OperationalError is not a subclass of DatabaseError") + self.assertIsSubclass(sqlite.OperationalError, sqlite.DatabaseError) def test_integrity_error(self): - self.assertTrue(issubclass(sqlite.IntegrityError, sqlite.DatabaseError), - "IntegrityError is not a subclass of DatabaseError") + self.assertIsSubclass(sqlite.IntegrityError, sqlite.DatabaseError) def test_internal_error(self): - self.assertTrue(issubclass(sqlite.InternalError, sqlite.DatabaseError), - "InternalError is not a subclass of DatabaseError") + self.assertIsSubclass(sqlite.InternalError, sqlite.DatabaseError) def test_programming_error(self): - self.assertTrue(issubclass(sqlite.ProgrammingError, sqlite.DatabaseError), - "ProgrammingError is not a subclass of DatabaseError") + self.assertIsSubclass(sqlite.ProgrammingError, sqlite.DatabaseError) def test_not_supported_error(self): - self.assertTrue(issubclass(sqlite.NotSupportedError, - sqlite.DatabaseError), - "NotSupportedError is not a subclass of 
DatabaseError") + self.assertIsSubclass(sqlite.NotSupportedError, sqlite.DatabaseError) def test_module_constants(self): consts = [ @@ -274,7 +263,7 @@ def test_module_constants(self): consts.append("SQLITE_IOERR_CORRUPTFS") for const in consts: with self.subTest(const=const): - self.assertTrue(hasattr(sqlite, const)) + self.assertHasAttr(sqlite, const) def test_error_code_on_exception(self): err_msg = "unable to open database file" @@ -288,7 +277,7 @@ def test_error_code_on_exception(self): sqlite.connect(db) e = cm.exception self.assertEqual(e.sqlite_errorcode, err_code) - self.assertTrue(e.sqlite_errorname.startswith("SQLITE_CANTOPEN")) + self.assertStartsWith(e.sqlite_errorname, "SQLITE_CANTOPEN") def test_extended_error_code_on_exception(self): with memory_database() as con: @@ -425,7 +414,7 @@ def test_connection_exceptions(self): ] for exc in exceptions: with self.subTest(exc=exc): - self.assertTrue(hasattr(self.cx, exc)) + self.assertHasAttr(self.cx, exc) self.assertIs(getattr(sqlite, exc), getattr(self.cx, exc)) def test_interrupt_on_closed_db(self): diff --git a/Lib/test/test_sqlite3/test_factory.py b/Lib/test/test_sqlite3/test_factory.py index 48d35b54a2e239..cc9f1ec5c4bec5 100644 --- a/Lib/test/test_sqlite3/test_factory.py +++ b/Lib/test/test_sqlite3/test_factory.py @@ -280,7 +280,7 @@ def test_custom(self): austria = "Österreich" row = self.con.execute("select ?", (austria,)).fetchone() self.assertEqual(type(row[0]), str, "type of row[0] must be unicode") - self.assertTrue(row[0].endswith("reich"), "column must contain original data") + self.assertEndsWith(row[0], "reich", "column must contain original data") class TextFactoryTestsWithEmbeddedZeroBytes(unittest.TestCase): diff --git a/Lib/test/test_sqlite3/test_hooks.py b/Lib/test/test_sqlite3/test_hooks.py index 49e72f8fcfbcbd..53b8a39bf29a75 100644 --- a/Lib/test/test_sqlite3/test_hooks.py +++ b/Lib/test/test_sqlite3/test_hooks.py @@ -196,7 +196,7 @@ def progress(): con.execute("select 1 union select 2 union select 3").fetchall() self.assertEqual(action, 0, "progress handler was not cleared") - @with_tracebacks(ZeroDivisionError, name="bad_progress") + @with_tracebacks(ZeroDivisionError, msg_regex="bad_progress") def test_error_in_progress_handler(self): def bad_progress(): 1 / 0 @@ -206,7 +206,7 @@ def bad_progress(): create table foo(a, b) """) - @with_tracebacks(ZeroDivisionError, name="bad_progress") + @with_tracebacks(ZeroDivisionError, msg_regex="bad_progress") def test_error_in_progress_handler_result(self): class BadBool: def __bool__(self): diff --git a/Lib/test/test_sqlite3/test_userfunctions.py b/Lib/test/test_sqlite3/test_userfunctions.py index c6c3db159add64..5bb2eff55ebc8f 100644 --- a/Lib/test/test_sqlite3/test_userfunctions.py +++ b/Lib/test/test_sqlite3/test_userfunctions.py @@ -254,7 +254,7 @@ def test_func_return_nan(self): cur.execute("select returnnan()") self.assertIsNone(cur.fetchone()[0]) - @with_tracebacks(ZeroDivisionError, name="func_raiseexception") + @with_tracebacks(ZeroDivisionError, msg_regex="func_raiseexception") def test_func_exception(self): cur = self.con.cursor() with self.assertRaises(sqlite.OperationalError) as cm: @@ -262,14 +262,14 @@ def test_func_exception(self): cur.fetchone() self.assertEqual(str(cm.exception), 'user-defined function raised exception') - @with_tracebacks(MemoryError, name="func_memoryerror") + @with_tracebacks(MemoryError, msg_regex="func_memoryerror") def test_func_memory_error(self): cur = self.con.cursor() with self.assertRaises(MemoryError): 
cur.execute("select memoryerror()") cur.fetchone() - @with_tracebacks(OverflowError, name="func_overflowerror") + @with_tracebacks(OverflowError, msg_regex="func_overflowerror") def test_func_overflow_error(self): cur = self.con.cursor() with self.assertRaises(sqlite.DataError): @@ -389,7 +389,7 @@ def test_func_return_too_large_int(self): with self.assertRaisesRegex(sqlite.DataError, msg): cur.execute("select largeint()") - @with_tracebacks(UnicodeEncodeError, "surrogates not allowed", "chr") + @with_tracebacks(UnicodeEncodeError, "surrogates not allowed") def test_func_return_text_with_surrogates(self): cur = self.con.cursor() self.con.create_function("pychr", 1, chr) @@ -641,7 +641,7 @@ def test_aggr_error_on_create(self): with self.assertRaises(sqlite.OperationalError): self.con.create_function("bla", -100, AggrSum) - @with_tracebacks(AttributeError, name="AggrNoStep") + @with_tracebacks(AttributeError, msg_regex="AggrNoStep") def test_aggr_no_step(self): cur = self.con.cursor() with self.assertRaises(sqlite.OperationalError) as cm: @@ -656,7 +656,7 @@ def test_aggr_no_finalize(self): cur.execute("select nofinalize(t) from test") val = cur.fetchone()[0] - @with_tracebacks(ZeroDivisionError, name="AggrExceptionInInit") + @with_tracebacks(ZeroDivisionError, msg_regex="AggrExceptionInInit") def test_aggr_exception_in_init(self): cur = self.con.cursor() with self.assertRaises(sqlite.OperationalError) as cm: @@ -664,7 +664,7 @@ def test_aggr_exception_in_init(self): val = cur.fetchone()[0] self.assertEqual(str(cm.exception), "user-defined aggregate's '__init__' method raised error") - @with_tracebacks(ZeroDivisionError, name="AggrExceptionInStep") + @with_tracebacks(ZeroDivisionError, msg_regex="AggrExceptionInStep") def test_aggr_exception_in_step(self): cur = self.con.cursor() with self.assertRaises(sqlite.OperationalError) as cm: @@ -672,7 +672,7 @@ def test_aggr_exception_in_step(self): val = cur.fetchone()[0] self.assertEqual(str(cm.exception), "user-defined aggregate's 'step' method raised error") - @with_tracebacks(ZeroDivisionError, name="AggrExceptionInFinalize") + @with_tracebacks(ZeroDivisionError, msg_regex="AggrExceptionInFinalize") def test_aggr_exception_in_finalize(self): cur = self.con.cursor() with self.assertRaises(sqlite.OperationalError) as cm: @@ -822,11 +822,11 @@ def authorizer_cb(action, arg1, arg2, dbname, source): raise ValueError return sqlite.SQLITE_OK - @with_tracebacks(ValueError, name="authorizer_cb") + @with_tracebacks(ValueError, msg_regex="authorizer_cb") def test_table_access(self): super().test_table_access() - @with_tracebacks(ValueError, name="authorizer_cb") + @with_tracebacks(ValueError, msg_regex="authorizer_cb") def test_column_access(self): super().test_table_access() diff --git a/Lib/test/test_sqlite3/util.py b/Lib/test/test_sqlite3/util.py index 5599823838beea..8643835cca46e2 100644 --- a/Lib/test/test_sqlite3/util.py +++ b/Lib/test/test_sqlite3/util.py @@ -22,15 +22,16 @@ def cx_limit(cx, category=sqlite3.SQLITE_LIMIT_SQL_LENGTH, limit=128): cx.setlimit(category, _prev) -def with_tracebacks(exc, regex="", name=""): +def with_tracebacks(exc, regex="", name="", msg_regex=""): """Convenience decorator for testing callback tracebacks.""" def decorator(func): - _regex = re.compile(regex) if regex else None + exc_regex = re.compile(regex) if regex else None + _msg_regex = re.compile(msg_regex) if msg_regex else None @functools.wraps(func) def wrapper(self, *args, **kwargs): with test.support.catch_unraisable_exception() as cm: # First, run the test 
with traceback enabled. - with check_tracebacks(self, cm, exc, _regex, name): + with check_tracebacks(self, cm, exc, exc_regex, _msg_regex, name): func(self, *args, **kwargs) # Then run the test with traceback disabled. @@ -40,7 +41,7 @@ def wrapper(self, *args, **kwargs): @contextlib.contextmanager -def check_tracebacks(self, cm, exc, regex, obj_name): +def check_tracebacks(self, cm, exc, exc_regex, msg_regex, obj_name): """Convenience context manager for testing callback tracebacks.""" sqlite3.enable_callback_tracebacks(True) try: @@ -49,9 +50,12 @@ def check_tracebacks(self, cm, exc, regex, obj_name): yield self.assertEqual(cm.unraisable.exc_type, exc) - if regex: + if exc_regex: msg = str(cm.unraisable.exc_value) - self.assertIsNotNone(regex.search(msg)) + self.assertIsNotNone(exc_regex.search(msg), (exc_regex, msg)) + if msg_regex: + msg = cm.unraisable.err_msg + self.assertIsNotNone(msg_regex.search(msg), (msg_regex, msg)) if obj_name: self.assertEqual(cm.unraisable.object.__name__, obj_name) finally: diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py index c16ef3f96f9a21..9863f3ffe97656 100644 --- a/Lib/test/test_ssl.py +++ b/Lib/test/test_ssl.py @@ -1325,8 +1325,7 @@ def test_load_verify_cadata(self): def test_load_dh_params(self): ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER) ctx.load_dh_params(DHFILE) - if os.name != 'nt': - ctx.load_dh_params(BYTES_DHFILE) + ctx.load_dh_params(BYTES_DHFILE) self.assertRaises(TypeError, ctx.load_dh_params) self.assertRaises(TypeError, ctx.load_dh_params, None) with self.assertRaises(FileNotFoundError) as cm: diff --git a/Lib/test/test_stable_abi_ctypes.py b/Lib/test/test_stable_abi_ctypes.py index fa08dc6a25b0ea..f3724ce6d4d15a 100644 --- a/Lib/test/test_stable_abi_ctypes.py +++ b/Lib/test/test_stable_abi_ctypes.py @@ -901,6 +901,8 @@ def test_windows_feature_macros(self): "Py_MakePendingCalls", "Py_NewInterpreter", "Py_NewRef", + "Py_PACK_FULL_VERSION", + "Py_PACK_VERSION", "Py_REFCNT", "Py_ReprEnter", "Py_ReprLeave", diff --git a/Lib/test/test_string.py b/Lib/test/test_string.py index 824b89ad517c12..f6d112d8a93ec4 100644 --- a/Lib/test/test_string.py +++ b/Lib/test/test_string.py @@ -1,6 +1,7 @@ import unittest import string from string import Template +import types class ModuleTest(unittest.TestCase): @@ -101,6 +102,24 @@ def test_index_lookup(self): with self.assertRaises(KeyError): fmt.format("{0[2]}{0[0]}", {}) + def test_auto_numbering_lookup(self): + fmt = string.Formatter() + namespace = types.SimpleNamespace(foo=types.SimpleNamespace(bar='baz')) + widths = [None, types.SimpleNamespace(qux=4)] + self.assertEqual( + fmt.format("{.foo.bar:{[1].qux}}", namespace, widths), 'baz ') + + def test_auto_numbering_reenterability(self): + class ReenteringFormatter(string.Formatter): + def format_field(self, value, format_spec): + if format_spec.isdigit() and int(format_spec) > 0: + return self.format('{:{}}!', value, int(format_spec) - 1) + else: + return super().format_field(value, format_spec) + fmt = ReenteringFormatter() + x = types.SimpleNamespace(a='X') + self.assertEqual(fmt.format('{.a:{}}', x, 3), 'X!!!') + def test_override_get_value(self): class NamespaceFormatter(string.Formatter): def __init__(self, namespace={}): diff --git a/Lib/test/test_string_literals.py b/Lib/test/test_string_literals.py index c7c6f684cd33f0..f56195ca27672c 100644 --- a/Lib/test/test_string_literals.py +++ b/Lib/test/test_string_literals.py @@ -116,7 +116,9 @@ def test_eval_str_invalid_escape(self): warnings.simplefilter('always', category=SyntaxWarning) 
eval("'''\n\\z'''") self.assertEqual(len(w), 1) - self.assertEqual(str(w[0].message), r"invalid escape sequence '\z'") + self.assertEqual(str(w[0].message), r'"\z" is an invalid escape sequence. ' + r'Such sequences will not work in the future. ' + r'Did you mean "\\z"? A raw string is also an option.') self.assertEqual(w[0].filename, '') self.assertEqual(w[0].lineno, 1) @@ -126,7 +128,8 @@ def test_eval_str_invalid_escape(self): eval("'''\n\\z'''") exc = cm.exception self.assertEqual(w, []) - self.assertEqual(exc.msg, r"invalid escape sequence '\z'") + self.assertEqual(exc.msg, r'"\z" is an invalid escape sequence. ' + r'Did you mean "\\z"? A raw string is also an option.') self.assertEqual(exc.filename, '') self.assertEqual(exc.lineno, 1) self.assertEqual(exc.offset, 1) @@ -153,7 +156,9 @@ def test_eval_str_invalid_octal_escape(self): eval("'''\n\\407'''") self.assertEqual(len(w), 1) self.assertEqual(str(w[0].message), - r"invalid octal escape sequence '\407'") + r'"\407" is an invalid octal escape sequence. ' + r'Such sequences will not work in the future. ' + r'Did you mean "\\407"? A raw string is also an option.') self.assertEqual(w[0].filename, '') self.assertEqual(w[0].lineno, 1) @@ -163,7 +168,8 @@ def test_eval_str_invalid_octal_escape(self): eval("'''\n\\407'''") exc = cm.exception self.assertEqual(w, []) - self.assertEqual(exc.msg, r"invalid octal escape sequence '\407'") + self.assertEqual(exc.msg, r'"\407" is an invalid octal escape sequence. ' + r'Did you mean "\\407"? A raw string is also an option.') self.assertEqual(exc.filename, '') self.assertEqual(exc.lineno, 1) self.assertEqual(exc.offset, 1) @@ -205,7 +211,9 @@ def test_eval_bytes_invalid_escape(self): warnings.simplefilter('always', category=SyntaxWarning) eval("b'''\n\\z'''") self.assertEqual(len(w), 1) - self.assertEqual(str(w[0].message), r"invalid escape sequence '\z'") + self.assertEqual(str(w[0].message), r'"\z" is an invalid escape sequence. ' + r'Such sequences will not work in the future. ' + r'Did you mean "\\z"? A raw string is also an option.') self.assertEqual(w[0].filename, '') self.assertEqual(w[0].lineno, 1) @@ -215,7 +223,8 @@ def test_eval_bytes_invalid_escape(self): eval("b'''\n\\z'''") exc = cm.exception self.assertEqual(w, []) - self.assertEqual(exc.msg, r"invalid escape sequence '\z'") + self.assertEqual(exc.msg, r'"\z" is an invalid escape sequence. ' + r'Did you mean "\\z"? A raw string is also an option.') self.assertEqual(exc.filename, '') self.assertEqual(exc.lineno, 1) @@ -228,8 +237,9 @@ def test_eval_bytes_invalid_octal_escape(self): warnings.simplefilter('always', category=SyntaxWarning) eval("b'''\n\\407'''") self.assertEqual(len(w), 1) - self.assertEqual(str(w[0].message), - r"invalid octal escape sequence '\407'") + self.assertEqual(str(w[0].message), r'"\407" is an invalid octal escape sequence. ' + r'Such sequences will not work in the future. ' + r'Did you mean "\\407"? A raw string is also an option.') self.assertEqual(w[0].filename, '') self.assertEqual(w[0].lineno, 1) @@ -239,7 +249,8 @@ def test_eval_bytes_invalid_octal_escape(self): eval("b'''\n\\407'''") exc = cm.exception self.assertEqual(w, []) - self.assertEqual(exc.msg, r"invalid octal escape sequence '\407'") + self.assertEqual(exc.msg, r'"\407" is an invalid octal escape sequence. ' + r'Did you mean "\\407"? 
A raw string is also an option.') self.assertEqual(exc.filename, '') self.assertEqual(exc.lineno, 1) diff --git a/Lib/test/test_super.py b/Lib/test/test_super.py index 149016635522c3..5cef612a340be9 100644 --- a/Lib/test/test_super.py +++ b/Lib/test/test_super.py @@ -9,9 +9,6 @@ from test.support import import_helper, threading_helper -ADAPTIVE_WARMUP_DELAY = 2 - - class A: def f(self): return 'A' @@ -466,7 +463,8 @@ def test(name): super(MyType, type(mytype)).__setattr__(mytype, "bar", 1) self.assertEqual(mytype.bar, 1) - for _ in range(ADAPTIVE_WARMUP_DELAY): + _testinternalcapi = import_helper.import_module("_testinternalcapi") + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): test("foo1") def test_reassigned_new(self): @@ -485,7 +483,8 @@ class C(B): def __new__(cls): return super().__new__(cls) - for _ in range(ADAPTIVE_WARMUP_DELAY): + _testinternalcapi = import_helper.import_module("_testinternalcapi") + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): C() def test_mixed_staticmethod_hierarchy(self): @@ -505,7 +504,8 @@ class C(B): def some(cls): return super().some(cls) - for _ in range(ADAPTIVE_WARMUP_DELAY): + _testinternalcapi = import_helper.import_module("_testinternalcapi") + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): C.some(C) @threading_helper.requires_working_threading() diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py index d839893d2c657e..39857445a02255 100644 --- a/Lib/test/test_sys.py +++ b/Lib/test/test_sys.py @@ -1621,7 +1621,7 @@ def func(): return sys._getframe() x = func() if support.Py_GIL_DISABLED: - INTERPRETER_FRAME = '10PhcP' + INTERPRETER_FRAME = '9PihcP' else: INTERPRETER_FRAME = '9PhcP' check(x, size('3PiccPP' + INTERPRETER_FRAME + 'P')) diff --git a/Lib/test/test_sysconfig.py b/Lib/test/test_sysconfig.py index ce504dc21af85f..3738914cf17de8 100644 --- a/Lib/test/test_sysconfig.py +++ b/Lib/test/test_sysconfig.py @@ -20,7 +20,7 @@ from test.support.import_helper import import_module from test.support.os_helper import (TESTFN, unlink, skip_unless_symlink, change_cwd) -from test.support.venv import VirtualEnvironment +from test.support.venv import VirtualEnvironmentMixin import sysconfig from sysconfig import (get_paths, get_platform, get_config_vars, @@ -37,7 +37,7 @@ HAS_USER_BASE = sysconfig._HAS_USER_BASE -class TestSysConfig(unittest.TestCase): +class TestSysConfig(unittest.TestCase, VirtualEnvironmentMixin): def setUp(self): super(TestSysConfig, self).setUp() @@ -111,13 +111,6 @@ def _cleanup_testfn(self): elif os.path.isdir(path): shutil.rmtree(path) - def venv(self, **venv_create_args): - return VirtualEnvironment.from_tmpdir( - prefix=f'{self.id()}-venv-', - **venv_create_args, - ) - - def test_get_path_names(self): self.assertEqual(get_path_names(), sysconfig._SCHEME_KEYS) @@ -650,8 +643,21 @@ def test_sysconfigdata_json(self): system_config_vars = get_config_vars() - # Ignore keys in the check - for key in ('projectbase', 'srcdir'): + ignore_keys = set() + # Keys dependent on Python being run outside the build directory + if sysconfig.is_python_build(): + ignore_keys |= {'srcdir'} + # Keys dependent on the executable location + if os.path.dirname(sys.executable) != system_config_vars['BINDIR']: + ignore_keys |= {'projectbase'} + # Keys dependent on the environment (different inside virtual environments) + if sys.prefix != sys.base_prefix: + ignore_keys |= {'prefix', 'exec_prefix', 'base', 'platbase'} + # Keys dependent on Python being run from the prefix targeted when building (different on
relocatable installs) + if sysconfig._installation_is_relocated(): + ignore_keys |= {'prefix', 'exec_prefix', 'base', 'platbase', 'installed_base', 'installed_platbase'} + + for key in ignore_keys: json_config_vars.pop(key) system_config_vars.pop(key) @@ -711,5 +717,38 @@ def test_parse_makefile(self): }) +class DeprecationTests(unittest.TestCase): + def deprecated(self, removal_version, deprecation_msg=None, error=Exception, error_msg=None): + if sys.version_info >= removal_version: + return self.assertRaises(error, msg=error_msg) + else: + return self.assertWarns(DeprecationWarning, msg=deprecation_msg) + + def test_expand_makefile_vars(self): + with self.deprecated( + removal_version=(3, 16), + deprecation_msg=( + 'sysconfig.expand_makefile_vars is deprecated and will be removed in ' + 'Python 3.16. Use sysconfig.get_paths(vars=...) instead.', + ), + error=AttributeError, + error_msg="module 'sysconfig' has no attribute 'expand_makefile_vars'", + ): + sysconfig.expand_makefile_vars('', {}) + + def test_is_python_build_check_home(self): + with self.deprecated( + removal_version=(3, 15), + deprecation_msg=( + 'The check_home argument of sysconfig.is_python_build is ' + 'deprecated and its value is ignored. ' + 'It will be removed in Python 3.15.' + ), + error=TypeError, + error_msg="is_python_build() takes 0 positional arguments but 1 were given", + ): + sysconfig.is_python_build('foo') + + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_threading.py b/Lib/test/test_threading.py index 3e164a12581dd1..214e1ba0b53dd2 100644 --- a/Lib/test/test_threading.py +++ b/Lib/test/test_threading.py @@ -2130,6 +2130,15 @@ def test_set_name(self): # Test long non-ASCII name (truncated) "x" * (limit - 1) + "é€", + + # Test long non-BMP names (truncated) creating surrogate pairs + # on Windows + "x" * (limit - 1) + "\U0010FFFF", + "x" * (limit - 2) + "\U0010FFFF" * 2, + "x" + "\U0001f40d" * limit, + "xx" + "\U0001f40d" * limit, + "xxx" + "\U0001f40d" * limit, + "xxxx" + "\U0001f40d" * limit, ] if os_helper.FS_NONASCII: tests.append(f"nonascii:{os_helper.FS_NONASCII}") @@ -2146,15 +2155,31 @@ def work(): work_name = _thread._get_name() for name in tests: - encoded = name.encode(encoding, "replace") - if b'\0' in encoded: - encoded = encoded.split(b'\0', 1)[0] - if truncate is not None: - encoded = encoded[:truncate] - if sys.platform.startswith("solaris"): - expected = encoded.decode("utf-8", "surrogateescape") + if not support.MS_WINDOWS: + encoded = name.encode(encoding, "replace") + if b'\0' in encoded: + encoded = encoded.split(b'\0', 1)[0] + if truncate is not None: + encoded = encoded[:truncate] + if sys.platform.startswith("solaris"): + expected = encoded.decode("utf-8", "surrogateescape") + else: + expected = os.fsdecode(encoded) else: - expected = os.fsdecode(encoded) + size = 0 + chars = [] + for ch in name: + if ord(ch) > 0xFFFF: + size += 2 + else: + size += 1 + if size > truncate: + break + chars.append(ch) + expected = ''.join(chars) + + if '\0' in expected: + expected = expected.split('\0', 1)[0] with self.subTest(name=name, expected=expected): work_name = None diff --git a/Lib/test/test_time.py b/Lib/test/test_time.py index 1c540bed33c71e..1147997d8d86bf 100644 --- a/Lib/test/test_time.py +++ b/Lib/test/test_time.py @@ -158,10 +158,19 @@ def test_conversions(self): self.assertEqual(int(time.mktime(time.localtime(self.t))), int(self.t)) - def test_sleep(self): + def test_sleep_exceptions(self): + self.assertRaises(TypeError, time.sleep, []) + 
self.assertRaises(TypeError, time.sleep, "a") + self.assertRaises(TypeError, time.sleep, complex(0, 0)) + self.assertRaises(ValueError, time.sleep, -2) self.assertRaises(ValueError, time.sleep, -1) - time.sleep(1.2) + self.assertRaises(ValueError, time.sleep, -0.1) + + def test_sleep(self): + for value in [-0.0, 0, 0.0, 1e-100, 1e-9, 1e-6, 1, 1.2]: + with self.subTest(value=value): + time.sleep(value) def test_epoch(self): # bpo-43869: Make sure that Python use the same Epoch on all platforms: diff --git a/Lib/test/test_tkinter/test_misc.py b/Lib/test/test_tkinter/test_misc.py index 3362169391818b..96ea3f0117ca03 100644 --- a/Lib/test/test_tkinter/test_misc.py +++ b/Lib/test/test_tkinter/test_misc.py @@ -31,12 +31,20 @@ def test_repr(self): self.assertEqual(repr(f), '') def test_generated_names(self): + class Button2(tkinter.Button): + pass + t = tkinter.Toplevel(self.root) f = tkinter.Frame(t) f2 = tkinter.Frame(t) + self.assertNotEqual(str(f), str(f2)) b = tkinter.Button(f2) - for name in str(b).split('.'): + b2 = Button2(f2) + for name in str(b).split('.') + str(b2).split('.'): self.assertFalse(name.isidentifier(), msg=repr(name)) + b3 = tkinter.Button(f2) + b4 = Button2(f2) + self.assertEqual(len({str(b), str(b2), str(b3), str(b4)}), 4) @requires_tk(8, 6, 6) def test_tk_busy(self): diff --git a/Lib/test/test_tokenize.py b/Lib/test/test_tokenize.py index 75710db7d05375..480bff743a9f8a 100644 --- a/Lib/test/test_tokenize.py +++ b/Lib/test/test_tokenize.py @@ -1,4 +1,5 @@ import os +import re import token import tokenize import unittest @@ -1819,6 +1820,22 @@ def test_iter_compat(self): self.assertEqual(tokenize.untokenize(iter(tokens)), b'Hello ') +def contains_ambiguous_backslash(source): + """Return `True` if the source contains a backslash on a + line by itself. For example: + + a = (1 + \\ + ) + + Code like this cannot be untokenized exactly. This is because + the tokenizer does not produce any tokens for the line containing + the backslash and so there is no way to know its indent. + """ + pattern = re.compile(br'\n\s*\\\r?\n') + return pattern.search(source) is not None + + class TestRoundtrip(TestCase): def check_roundtrip(self, f): @@ -1829,6 +1846,9 @@ def check_roundtrip(self, f): tokenize.untokenize(), and the latter tokenized again to 2-tuples. The test fails if the 3 pair tokenizations do not match. + If the source code can be untokenized unambiguously, the + untokenized code must match the original code exactly. + When untokenize bugs are fixed, untokenize with 5-tuples should reproduce code that does not contain a backslash continuation following spaces. A proper test should test this. @@ -1852,6 +1872,13 @@ def check_roundtrip(self, f): tokens2_from5 = [tok[:2] for tok in tokenize.tokenize(readline5)] self.assertEqual(tokens2_from5, tokens2) + if not contains_ambiguous_backslash(code): + # The BOM does not produce a token so there is no way to preserve it. 
+ code_without_bom = code.removeprefix(b'\xef\xbb\xbf') + readline = iter(code_without_bom.splitlines(keepends=True)).__next__ + untokenized_code = tokenize.untokenize(tokenize.tokenize(readline)) + self.assertEqual(code_without_bom, untokenized_code) + def check_line_extraction(self, f): if isinstance(f, str): code = f.encode('utf-8') diff --git a/Lib/test/test_tomllib/test_misc.py b/Lib/test/test_tomllib/test_misc.py index 9e677a337a2835..59116afa1f36ad 100644 --- a/Lib/test/test_tomllib/test_misc.py +++ b/Lib/test/test_tomllib/test_misc.py @@ -5,6 +5,7 @@ import copy import datetime from decimal import Decimal as D +import importlib from pathlib import Path import sys import tempfile @@ -113,3 +114,11 @@ def test_inline_table_recursion_limit(self): nest_count=nest_count): recursive_table_toml = nest_count * "key = {" + nest_count * "}" tomllib.loads(recursive_table_toml) + + def test_types_import(self): + """Test that `_types` module runs. + + The module is for type annotations only, so it is otherwise + never imported by tests. + """ + importlib.import_module(f"{tomllib.__name__}._types") diff --git a/Lib/test/test_traceback.py b/Lib/test/test_traceback.py index 31f0a61d6a9d59..89980ae6f8573a 100644 --- a/Lib/test/test_traceback.py +++ b/Lib/test/test_traceback.py @@ -21,7 +21,7 @@ from test.support.os_helper import TESTFN, unlink from test.support.script_helper import assert_python_ok, assert_python_failure from test.support.import_helper import forget -from test.support import force_not_colorized +from test.support import force_not_colorized, force_not_colorized_test_class import json import textwrap @@ -86,7 +86,7 @@ def test_caret(self): err = self.get_exception_format(self.syntax_error_with_caret, SyntaxError) self.assertEqual(len(err), 4) - self.assertTrue(err[1].strip() == "return x!") + self.assertEqual(err[1].strip(), "return x!") self.assertIn("^", err[2]) # third line has caret self.assertEqual(err[1].find("!"), err[2].find("^")) # in the right place self.assertEqual(err[2].count("^"), 1) @@ -376,6 +376,30 @@ def f(): ' ValueError: 0\n', ]) + def test_format_exception_group_syntax_error_with_custom_values(self): + # See https://github.com/python/cpython/issues/128894 + for exc in [ + SyntaxError('error', 'abcd'), + SyntaxError('error', [None] * 4), + SyntaxError('error', (1, 2, 3, 4)), + SyntaxError('error', (1, 2, 3, 4)), + SyntaxError('error', (1, 'a', 'b', 2)), + # with end_lineno and end_offset: + SyntaxError('error', 'abcdef'), + SyntaxError('error', [None] * 6), + SyntaxError('error', (1, 2, 3, 4, 5, 6)), + SyntaxError('error', (1, 'a', 'b', 2, 'c', 'd')), + ]: + with self.subTest(exc=exc): + err = traceback.format_exception_only(exc, show_group=True) + # Should not raise an exception: + if exc.lineno is not None: + self.assertEqual(len(err), 2) + self.assertTrue(err[0].startswith(' File')) + else: + self.assertEqual(len(err), 1) + self.assertEqual(err[-1], 'SyntaxError: error\n') + @requires_subprocess() @force_not_colorized def test_encoded_file(self): @@ -419,16 +443,10 @@ def do_test(firstlines, message, charset, lineno): err_line = "raise RuntimeError('{0}')".format(message_ascii) err_msg = "RuntimeError: {0}".format(message_ascii) - self.assertIn(("line %s" % lineno), stdout[1], - "Invalid line number: {0!r} instead of {1}".format( - stdout[1], lineno)) - self.assertTrue(stdout[2].endswith(err_line), - "Invalid traceback line: {0!r} instead of {1!r}".format( - stdout[2], err_line)) + self.assertIn("line %s" % lineno, stdout[1]) + self.assertEndsWith(stdout[2], 
err_line) actual_err_msg = stdout[3] - self.assertTrue(actual_err_msg == err_msg, - "Invalid error message: {0!r} instead of {1!r}".format( - actual_err_msg, err_msg)) + self.assertEqual(actual_err_msg, err_msg) do_test("", "foo", "ascii", 3) for charset in ("ascii", "iso-8859-1", "utf-8", "GBK"): @@ -1712,6 +1730,7 @@ def f(): @requires_debug_ranges() +@force_not_colorized_test_class class PurePythonTracebackErrorCaretTests( PurePythonExceptionFormattingMixin, TracebackErrorLocationCaretTestBase, @@ -1725,6 +1744,7 @@ class PurePythonTracebackErrorCaretTests( @cpython_only @requires_debug_ranges() +@force_not_colorized_test_class class CPythonTracebackErrorCaretTests( CAPIExceptionFormattingMixin, TracebackErrorLocationCaretTestBase, @@ -1736,6 +1756,7 @@ class CPythonTracebackErrorCaretTests( @cpython_only @requires_debug_ranges() +@force_not_colorized_test_class class CPythonTracebackLegacyErrorCaretTests( CAPIExceptionFormattingLegacyMixin, TracebackErrorLocationCaretTestBase, @@ -1806,9 +1827,9 @@ def check_traceback_format(self, cleanup_func=None): banner = tb_lines[0] self.assertEqual(len(tb_lines), 5) location, source_line = tb_lines[-2], tb_lines[-1] - self.assertTrue(banner.startswith('Traceback')) - self.assertTrue(location.startswith(' File')) - self.assertTrue(source_line.startswith(' raise')) + self.assertStartsWith(banner, 'Traceback') + self.assertStartsWith(location, ' File') + self.assertStartsWith(source_line, ' raise') def test_traceback_format(self): self.check_traceback_format() @@ -2149,10 +2170,12 @@ def test_print_exception_bad_type_python(self): boundaries = re.compile( '(%s|%s)' % (re.escape(cause_message), re.escape(context_message))) +@force_not_colorized_test_class class TestTracebackFormat(unittest.TestCase, TracebackFormatMixin): pass @cpython_only +@force_not_colorized_test_class class TestFallbackTracebackFormat(unittest.TestCase, TracebackFormatMixin): DEBUG_RANGES = False def setUp(self) -> None: @@ -2185,12 +2208,12 @@ def zero_div(self): def check_zero_div(self, msg): lines = msg.splitlines() if has_no_debug_ranges(): - self.assertTrue(lines[-3].startswith(' File')) + self.assertStartsWith(lines[-3], ' File') self.assertIn('1/0 # In zero_div', lines[-2]) else: - self.assertTrue(lines[-4].startswith(' File')) + self.assertStartsWith(lines[-4], ' File') self.assertIn('1/0 # In zero_div', lines[-3]) - self.assertTrue(lines[-1].startswith('ZeroDivisionError'), lines[-1]) + self.assertStartsWith(lines[-1], 'ZeroDivisionError') def test_simple(self): try: @@ -2200,12 +2223,12 @@ def test_simple(self): lines = self.get_report(e).splitlines() if has_no_debug_ranges(): self.assertEqual(len(lines), 4) - self.assertTrue(lines[3].startswith('ZeroDivisionError')) + self.assertStartsWith(lines[3], 'ZeroDivisionError') else: self.assertEqual(len(lines), 5) - self.assertTrue(lines[4].startswith('ZeroDivisionError')) - self.assertTrue(lines[0].startswith('Traceback')) - self.assertTrue(lines[1].startswith(' File')) + self.assertStartsWith(lines[4], 'ZeroDivisionError') + self.assertStartsWith(lines[0], 'Traceback') + self.assertStartsWith(lines[1], ' File') self.assertIn('1/0 # Marker', lines[2]) def test_cause(self): @@ -2246,9 +2269,9 @@ def test_context_suppression(self): e = _ lines = self.get_report(e).splitlines() self.assertEqual(len(lines), 4) - self.assertTrue(lines[3].startswith('ZeroDivisionError')) - self.assertTrue(lines[0].startswith('Traceback')) - self.assertTrue(lines[1].startswith(' File')) + self.assertStartsWith(lines[3], 'ZeroDivisionError') + 
self.assertStartsWith(lines[0], 'Traceback') + self.assertStartsWith(lines[1], ' File') self.assertIn('ZeroDivisionError from None', lines[2]) def test_cause_and_context(self): @@ -2914,6 +2937,33 @@ def exc(): report = self.get_report(exc) self.assertEqual(report, expected) + def test_exception_group_wrapped_naked(self): + # See gh-128799 + + def exc(): + try: + raise Exception(42) + except* Exception as e: + raise + + expected = (f' + Exception Group Traceback (most recent call last):\n' + f' | File "{__file__}", line {self.callable_line}, in get_exception\n' + f' | exception_or_callable()\n' + f' | ~~~~~~~~~~~~~~~~~~~~~^^\n' + f' | File "{__file__}", line {exc.__code__.co_firstlineno + 3}, in exc\n' + f' | except* Exception as e:\n' + f' | raise\n' + f' | ExceptionGroup: (1 sub-exception)\n' + f' +-+---------------- 1 ----------------\n' + f' | Traceback (most recent call last):\n' + f' | File "{__file__}", line {exc.__code__.co_firstlineno + 2}, in exc\n' + f' | raise Exception(42)\n' + f' | Exception: 42\n' + f' +------------------------------------\n') + + report = self.get_report(exc) + self.assertEqual(report, expected) + def test_KeyboardInterrupt_at_first_line_of_frame(self): # see GH-93249 def f(): @@ -2940,6 +2990,7 @@ def f(): self.assertEqual(report, expected) +@force_not_colorized_test_class class PyExcReportingTests(BaseExceptionReportingTests, unittest.TestCase): # # This checks reporting through the 'traceback' module, with both @@ -2956,6 +3007,7 @@ def get_report(self, e): return s +@force_not_colorized_test_class class CExcReportingTests(BaseExceptionReportingTests, unittest.TestCase): # # This checks built-in reporting by the interpreter. diff --git a/Lib/test/test_tracemalloc.py b/Lib/test/test_tracemalloc.py index 5755f7697de91a..0220a83d24b428 100644 --- a/Lib/test/test_tracemalloc.py +++ b/Lib/test/test_tracemalloc.py @@ -1,14 +1,16 @@ import contextlib import os import sys +import textwrap import tracemalloc import unittest from unittest.mock import patch from test.support.script_helper import (assert_python_ok, assert_python_failure, interpreter_requires_environment) from test import support -from test.support import os_helper from test.support import force_not_colorized +from test.support import os_helper +from test.support import threading_helper try: import _testcapi @@ -18,6 +20,7 @@ _testinternalcapi = None +DEFAULT_DOMAIN = 0 EMPTY_STRING_SIZE = sys.getsizeof(b'') INVALID_NFRAME = (-1, 2**30) @@ -952,7 +955,6 @@ def check_env_var_invalid(self, nframe): return self.fail(f"unexpected output: {stderr!a}") - def test_env_var_invalid(self): for nframe in INVALID_NFRAME: with self.subTest(nframe=nframe): @@ -981,6 +983,7 @@ def check_sys_xoptions_invalid(self, nframe): return self.fail(f"unexpected output: {stderr!a}") + @force_not_colorized def test_sys_xoptions_invalid(self): for nframe in INVALID_NFRAME: with self.subTest(nframe=nframe): @@ -1026,8 +1029,8 @@ def track(self, release_gil=False, nframe=1): release_gil) return frames - def untrack(self): - _testcapi.tracemalloc_untrack(self.domain, self.ptr) + def untrack(self, release_gil=False): + _testcapi.tracemalloc_untrack(self.domain, self.ptr, release_gil) def get_traced_memory(self): # Get the traced size in the domain @@ -1069,7 +1072,7 @@ def test_track_already_tracked(self): self.assertEqual(self.get_traceback(), tracemalloc.Traceback(frames)) - def test_untrack(self): + def check_untrack(self, release_gil): tracemalloc.start() self.track() @@ -1077,13 +1080,19 @@ def test_untrack(self): 
self.assertEqual(self.get_traced_memory(), self.size) # untrack must remove the trace - self.untrack() + self.untrack(release_gil) self.assertIsNone(self.get_traceback()) self.assertEqual(self.get_traced_memory(), 0) # calling _PyTraceMalloc_Untrack() multiple times must not crash - self.untrack() - self.untrack() + self.untrack(release_gil) + self.untrack(release_gil) + + def test_untrack(self): + self.check_untrack(False) + + def test_untrack_without_gil(self): + self.check_untrack(True) def test_stop_track(self): tracemalloc.start() @@ -1101,6 +1110,37 @@ def test_stop_untrack(self): with self.assertRaises(RuntimeError): self.untrack() + @unittest.skipIf(_testcapi is None, 'need _testcapi') + @threading_helper.requires_working_threading() + # gh-128679: Test crash on a debug build (especially on FreeBSD). + @unittest.skipIf(support.Py_DEBUG, 'need release build') + def test_tracemalloc_track_race(self): + # gh-128679: Test fix for tracemalloc.stop() race condition + _testcapi.tracemalloc_track_race() + + def test_late_untrack(self): + code = textwrap.dedent(f""" + from test import support + import tracemalloc + import _testcapi + + class Tracked: + def __init__(self, domain, size): + self.domain = domain + self.ptr = id(self) + self.size = size + _testcapi.tracemalloc_track(self.domain, self.ptr, self.size) + + def __del__(self, untrack=_testcapi.tracemalloc_untrack): + untrack(self.domain, self.ptr, 1) + + domain = {DEFAULT_DOMAIN} + tracemalloc.start() + obj = Tracked(domain, 1024 * 1024) + support.late_deletion(obj) + """) + assert_python_ok("-c", code) + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_turtle.py b/Lib/test/test_turtle.py index c75a002a89b4c4..d02cac284a909a 100644 --- a/Lib/test/test_turtle.py +++ b/Lib/test/test_turtle.py @@ -1,9 +1,9 @@ import os import pickle import re +import tempfile import unittest import unittest.mock -import tempfile from test import support from test.support import import_helper from test.support import os_helper @@ -54,6 +54,21 @@ """ +def patch_screen(): + """Patch turtle._Screen for testing without a display. + + We must patch the _Screen class itself instead of the _Screen + instance because instantiating it requires a display. 
+ """ + return unittest.mock.patch( + "turtle._Screen.__new__", + **{ + "return_value.__class__": turtle._Screen, + "return_value.mode.return_value": "standard", + }, + ) + + class TurtleConfigTest(unittest.TestCase): def get_cfg_file(self, cfg_str): @@ -513,7 +528,7 @@ def test_save_overwrites_if_specified(self) -> None: turtle.TurtleScreen.save(screen, file_path, overwrite=True) with open(file_path) as f: - assert f.read() == "postscript" + self.assertEqual(f.read(), "postscript") def test_save(self) -> None: screen = unittest.mock.Mock() @@ -524,7 +539,101 @@ def test_save(self) -> None: turtle.TurtleScreen.save(screen, file_path) with open(file_path) as f: - assert f.read() == "postscript" + self.assertEqual(f.read(), "postscript") + + def test_no_animation_sets_tracer_0(self): + s = turtle.TurtleScreen(cv=unittest.mock.MagicMock()) + + with s.no_animation(): + self.assertEqual(s.tracer(), 0) + + def test_no_animation_resets_tracer_to_old_value(self): + s = turtle.TurtleScreen(cv=unittest.mock.MagicMock()) + + for tracer in [0, 1, 5]: + s.tracer(tracer) + with s.no_animation(): + pass + self.assertEqual(s.tracer(), tracer) + + def test_no_animation_calls_update_at_exit(self): + s = turtle.TurtleScreen(cv=unittest.mock.MagicMock()) + s.update = unittest.mock.MagicMock() + + with s.no_animation(): + s.update.assert_not_called() + s.update.assert_called_once() + + +class TestTurtle(unittest.TestCase): + def setUp(self): + with patch_screen(): + self.turtle = turtle.Turtle() + + # Reset the Screen singleton to avoid reference leaks + self.addCleanup(setattr, turtle.Turtle, '_screen', None) + + def test_begin_end_fill(self): + self.assertFalse(self.turtle.filling()) + self.turtle.begin_fill() + self.assertTrue(self.turtle.filling()) + self.turtle.end_fill() + self.assertFalse(self.turtle.filling()) + + def test_fill(self): + # The context manager behaves like begin_fill and end_fill. + self.assertFalse(self.turtle.filling()) + with self.turtle.fill(): + self.assertTrue(self.turtle.filling()) + self.assertFalse(self.turtle.filling()) + + def test_fill_resets_after_exception(self): + # The context manager cleans up correctly after exceptions. + try: + with self.turtle.fill(): + self.assertTrue(self.turtle.filling()) + raise ValueError + except ValueError: + self.assertFalse(self.turtle.filling()) + + def test_fill_context_when_filling(self): + # The context manager works even when the turtle is already filling. + self.turtle.begin_fill() + self.assertTrue(self.turtle.filling()) + with self.turtle.fill(): + self.assertTrue(self.turtle.filling()) + self.assertFalse(self.turtle.filling()) + + def test_begin_end_poly(self): + self.assertFalse(self.turtle._creatingPoly) + self.turtle.begin_poly() + self.assertTrue(self.turtle._creatingPoly) + self.turtle.end_poly() + self.assertFalse(self.turtle._creatingPoly) + + def test_poly(self): + # The context manager behaves like begin_poly and end_poly. + self.assertFalse(self.turtle._creatingPoly) + with self.turtle.poly(): + self.assertTrue(self.turtle._creatingPoly) + self.assertFalse(self.turtle._creatingPoly) + + def test_poly_resets_after_exception(self): + # The context manager cleans up correctly after exceptions. + try: + with self.turtle.poly(): + self.assertTrue(self.turtle._creatingPoly) + raise ValueError + except ValueError: + self.assertFalse(self.turtle._creatingPoly) + + def test_poly_context_when_creating_poly(self): + # The context manager works when the turtle is already creating poly. 
+ self.turtle.begin_poly() + self.assertTrue(self.turtle._creatingPoly) + with self.turtle.poly(): + self.assertTrue(self.turtle._creatingPoly) + self.assertFalse(self.turtle._creatingPoly) class TestModuleLevel(unittest.TestCase): diff --git a/Lib/test/test_type_cache.py b/Lib/test/test_type_cache.py index e109a65741309a..ee64f89358ed55 100644 --- a/Lib/test/test_type_cache.py +++ b/Lib/test/test_type_cache.py @@ -131,7 +131,7 @@ def _all_opnames(self, func): return set(instr.opname for instr in dis.Bytecode(func, adaptive=True)) def _check_specialization(self, func, arg, opname, *, should_specialize): - for _ in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): func(arg) if should_specialize: diff --git a/Lib/test/test_typing.py b/Lib/test/test_typing.py index ef3cfc9517085e..f002d28df60e9c 100644 --- a/Lib/test/test_typing.py +++ b/Lib/test/test_typing.py @@ -45,6 +45,7 @@ import textwrap import typing import weakref +import warnings import types from test.support import captured_stderr, cpython_only, infinite_recursion, requires_docstrings, import_helper, run_code @@ -58,20 +59,6 @@ class BaseTestCase(TestCase): - def assertIsSubclass(self, cls, class_or_tuple, msg=None): - if not issubclass(cls, class_or_tuple): - message = '%r is not a subclass of %r' % (cls, class_or_tuple) - if msg is not None: - message += ' : %s' % msg - raise self.failureException(message) - - def assertNotIsSubclass(self, cls, class_or_tuple, msg=None): - if issubclass(cls, class_or_tuple): - message = '%r is a subclass of %r' % (cls, class_or_tuple) - if msg is not None: - message += ' : %s' % msg - raise self.failureException(message) - def clear_caches(self): for f in typing._cleanups: f() @@ -128,18 +115,18 @@ def test_errors(self): def test_can_subclass(self): class Mock(Any): pass - self.assertTrue(issubclass(Mock, Any)) + self.assertIsSubclass(Mock, Any) self.assertIsInstance(Mock(), Mock) class Something: pass - self.assertFalse(issubclass(Something, Any)) + self.assertNotIsSubclass(Something, Any) self.assertNotIsInstance(Something(), Mock) class MockSomething(Something, Mock): pass - self.assertTrue(issubclass(MockSomething, Any)) - self.assertTrue(issubclass(MockSomething, MockSomething)) - self.assertTrue(issubclass(MockSomething, Something)) - self.assertTrue(issubclass(MockSomething, Mock)) + self.assertIsSubclass(MockSomething, Any) + self.assertIsSubclass(MockSomething, MockSomething) + self.assertIsSubclass(MockSomething, Something) + self.assertIsSubclass(MockSomething, Mock) ms = MockSomething() self.assertIsInstance(ms, MockSomething) self.assertIsInstance(ms, Something) @@ -1251,10 +1238,6 @@ class Gen[*Ts]: ... 
class TypeVarTupleTests(BaseTestCase): - def assertEndsWith(self, string, tail): - if not string.endswith(tail): - self.fail(f"String {string!r} does not end with {tail!r}") - def test_name(self): Ts = TypeVarTuple('Ts') self.assertEqual(Ts.__name__, 'Ts') @@ -2014,11 +1997,11 @@ def test_basics(self): self.assertNotEqual(u, Union) def test_union_isinstance(self): - self.assertTrue(isinstance(42, Union[int, str])) - self.assertTrue(isinstance('abc', Union[int, str])) - self.assertFalse(isinstance(3.14, Union[int, str])) - self.assertTrue(isinstance(42, Union[int, list[int]])) - self.assertTrue(isinstance(42, Union[int, Any])) + self.assertIsInstance(42, Union[int, str]) + self.assertIsInstance('abc', Union[int, str]) + self.assertNotIsInstance(3.14, Union[int, str]) + self.assertIsInstance(42, Union[int, list[int]]) + self.assertIsInstance(42, Union[int, Any]) def test_union_isinstance_type_error(self): with self.assertRaises(TypeError): @@ -2035,9 +2018,9 @@ def test_union_isinstance_type_error(self): isinstance(42, Union[Any, str]) def test_optional_isinstance(self): - self.assertTrue(isinstance(42, Optional[int])) - self.assertTrue(isinstance(None, Optional[int])) - self.assertFalse(isinstance('abc', Optional[int])) + self.assertIsInstance(42, Optional[int]) + self.assertIsInstance(None, Optional[int]) + self.assertNotIsInstance('abc', Optional[int]) def test_optional_isinstance_type_error(self): with self.assertRaises(TypeError): @@ -2050,14 +2033,14 @@ def test_optional_isinstance_type_error(self): isinstance(None, Optional[Any]) def test_union_issubclass(self): - self.assertTrue(issubclass(int, Union[int, str])) - self.assertTrue(issubclass(str, Union[int, str])) - self.assertFalse(issubclass(float, Union[int, str])) - self.assertTrue(issubclass(int, Union[int, list[int]])) - self.assertTrue(issubclass(int, Union[int, Any])) - self.assertFalse(issubclass(int, Union[str, Any])) - self.assertTrue(issubclass(int, Union[Any, int])) - self.assertFalse(issubclass(int, Union[Any, str])) + self.assertIsSubclass(int, Union[int, str]) + self.assertIsSubclass(str, Union[int, str]) + self.assertNotIsSubclass(float, Union[int, str]) + self.assertIsSubclass(int, Union[int, list[int]]) + self.assertIsSubclass(int, Union[int, Any]) + self.assertNotIsSubclass(int, Union[str, Any]) + self.assertIsSubclass(int, Union[Any, int]) + self.assertNotIsSubclass(int, Union[Any, str]) def test_union_issubclass_type_error(self): with self.assertRaises(TypeError): @@ -2074,12 +2057,12 @@ def test_union_issubclass_type_error(self): issubclass(int, Union[list[int], str]) def test_optional_issubclass(self): - self.assertTrue(issubclass(int, Optional[int])) - self.assertTrue(issubclass(type(None), Optional[int])) - self.assertFalse(issubclass(str, Optional[int])) - self.assertTrue(issubclass(Any, Optional[Any])) - self.assertTrue(issubclass(type(None), Optional[Any])) - self.assertFalse(issubclass(int, Optional[Any])) + self.assertIsSubclass(int, Optional[int]) + self.assertIsSubclass(type(None), Optional[int]) + self.assertNotIsSubclass(str, Optional[int]) + self.assertIsSubclass(Any, Optional[Any]) + self.assertIsSubclass(type(None), Optional[Any]) + self.assertNotIsSubclass(int, Optional[Any]) def test_optional_issubclass_type_error(self): with self.assertRaises(TypeError): @@ -4067,8 +4050,8 @@ def test_generic_protocols_repr(self): class P(Protocol[T, S]): pass - self.assertTrue(repr(P[T, S]).endswith('P[~T, ~S]')) - self.assertTrue(repr(P[int, str]).endswith('P[int, str]')) + self.assertEndsWith(repr(P[T, S]), 
'P[~T, ~S]') + self.assertEndsWith(repr(P[int, str]), 'P[int, str]') def test_generic_protocols_eq(self): T = TypeVar('T') @@ -4658,8 +4641,7 @@ class C(Generic[T]): self.assertNotEqual(Z, Y[int]) self.assertNotEqual(Z, Y[T]) - self.assertTrue(str(Z).endswith( - '.C[typing.Tuple[str, int]]')) + self.assertEndsWith(str(Z), '.C[typing.Tuple[str, int]]') def test_new_repr(self): T = TypeVar('T') @@ -4887,12 +4869,12 @@ class A(Generic[T]): self.assertNotEqual(typing.FrozenSet[A[str]], typing.FrozenSet[mod_generics_cache.B.A[str]]) - self.assertTrue(repr(Tuple[A[str]]).endswith('.A[str]]')) - self.assertTrue(repr(Tuple[B.A[str]]).endswith('.B.A[str]]')) - self.assertTrue(repr(Tuple[mod_generics_cache.A[str]]) - .endswith('mod_generics_cache.A[str]]')) - self.assertTrue(repr(Tuple[mod_generics_cache.B.A[str]]) - .endswith('mod_generics_cache.B.A[str]]')) + self.assertEndsWith(repr(Tuple[A[str]]), '.A[str]]') + self.assertEndsWith(repr(Tuple[B.A[str]]), '.B.A[str]]') + self.assertEndsWith(repr(Tuple[mod_generics_cache.A[str]]), + 'mod_generics_cache.A[str]]') + self.assertEndsWith(repr(Tuple[mod_generics_cache.B.A[str]]), + 'mod_generics_cache.B.A[str]]') def test_extended_generic_rules_eq(self): T = TypeVar('T') @@ -5182,6 +5164,18 @@ class C(B[int]): x = pickle.loads(z) self.assertEqual(s, x) + # Test ParamSpec args and kwargs + global PP + PP = ParamSpec('PP') + for thing in [PP.args, PP.kwargs]: + for proto in range(pickle.HIGHEST_PROTOCOL + 1): + with self.subTest(thing=thing, proto=proto): + self.assertEqual( + pickle.loads(pickle.dumps(thing, proto)), + thing, + ) + del PP + def test_copy_and_deepcopy(self): T = TypeVar('T') class Node(Generic[T]): ... @@ -5822,7 +5816,7 @@ def __call__(self, *args, **kwargs): @Wrapper def wrapped(): ... self.assertIsInstance(wrapped, Wrapper) - self.assertIs(False, hasattr(wrapped, "__final__")) + self.assertNotHasAttr(wrapped, "__final__") class Meta(type): @property @@ -5834,7 +5828,7 @@ class WithMeta(metaclass=Meta): ... # Builtin classes throw TypeError if you try to set an # attribute. 
final(int) - self.assertIs(False, hasattr(int, "__final__")) + self.assertNotHasAttr(int, "__final__") # Make sure it works with common builtin decorators class Methods: @@ -5915,19 +5909,19 @@ def static_method_bad_order(): self.assertEqual(Derived.class_method_good_order(), 42) self.assertIs(True, Derived.class_method_good_order.__override__) self.assertEqual(Derived.class_method_bad_order(), 42) - self.assertIs(False, hasattr(Derived.class_method_bad_order, "__override__")) + self.assertNotHasAttr(Derived.class_method_bad_order, "__override__") self.assertEqual(Derived.static_method_good_order(), 42) self.assertIs(True, Derived.static_method_good_order.__override__) self.assertEqual(Derived.static_method_bad_order(), 42) - self.assertIs(False, hasattr(Derived.static_method_bad_order, "__override__")) + self.assertNotHasAttr(Derived.static_method_bad_order, "__override__") # Base object is not changed: - self.assertIs(False, hasattr(Base.normal_method, "__override__")) - self.assertIs(False, hasattr(Base.class_method_good_order, "__override__")) - self.assertIs(False, hasattr(Base.class_method_bad_order, "__override__")) - self.assertIs(False, hasattr(Base.static_method_good_order, "__override__")) - self.assertIs(False, hasattr(Base.static_method_bad_order, "__override__")) + self.assertNotHasAttr(Base.normal_method, "__override__") + self.assertNotHasAttr(Base.class_method_good_order, "__override__") + self.assertNotHasAttr(Base.class_method_bad_order, "__override__") + self.assertNotHasAttr(Base.static_method_good_order, "__override__") + self.assertNotHasAttr(Base.static_method_bad_order, "__override__") def test_property(self): class Base: @@ -5952,8 +5946,8 @@ def wrong(self) -> int: self.assertEqual(instance.correct, 2) self.assertTrue(Child.correct.fget.__override__) self.assertEqual(instance.wrong, 2) - self.assertFalse(hasattr(Child.wrong, "__override__")) - self.assertFalse(hasattr(Child.wrong.fset, "__override__")) + self.assertNotHasAttr(Child.wrong, "__override__") + self.assertNotHasAttr(Child.wrong.fset, "__override__") def test_silent_failure(self): class CustomProp: @@ -5970,7 +5964,7 @@ def some(self): return 1 self.assertEqual(WithOverride.some, 1) - self.assertFalse(hasattr(WithOverride.some, "__override__")) + self.assertNotHasAttr(WithOverride.some, "__override__") def test_multiple_decorators(self): def with_wraps(f): # similar to `lru_cache` definition @@ -7140,6 +7134,25 @@ class C: self.assertEqual(get_type_hints(C, format=annotationlib.Format.STRING), {'x': 'undefined'}) + def test_get_type_hints_format_function(self): + def func(x: undefined) -> undefined: ... 
+ + # VALUE + with self.assertRaises(NameError): + get_type_hints(func) + with self.assertRaises(NameError): + get_type_hints(func, format=annotationlib.Format.VALUE) + + # FORWARDREF + self.assertEqual( + get_type_hints(func, format=annotationlib.Format.FORWARDREF), + {'x': ForwardRef('undefined'), 'return': ForwardRef('undefined')}, + ) + + # STRING + self.assertEqual(get_type_hints(func, format=annotationlib.Format.STRING), + {'x': 'undefined', 'return': 'undefined'}) + class GetUtilitiesTestCase(TestCase): def test_get_origin(self): @@ -7242,6 +7255,51 @@ class C(Generic[T]): pass self.assertEqual(get_args(Unpack[tuple[Unpack[Ts]]]), (tuple[Unpack[Ts]],)) +class EvaluateForwardRefTests(BaseTestCase): + def test_evaluate_forward_ref(self): + int_ref = ForwardRef('int') + missing = ForwardRef('missing') + self.assertIs( + typing.evaluate_forward_ref(int_ref, type_params=()), + int, + ) + self.assertIs( + typing.evaluate_forward_ref( + int_ref, type_params=(), format=annotationlib.Format.FORWARDREF, + ), + int, + ) + self.assertIs( + typing.evaluate_forward_ref( + missing, type_params=(), format=annotationlib.Format.FORWARDREF, + ), + missing, + ) + self.assertEqual( + typing.evaluate_forward_ref( + int_ref, type_params=(), format=annotationlib.Format.STRING, + ), + 'int', + ) + + def test_evaluate_forward_ref_no_type_params(self): + ref = ForwardRef('int') + with self.assertWarnsRegex( + DeprecationWarning, + ( + "Failing to pass a value to the 'type_params' parameter " + "of 'typing.evaluate_forward_ref' is deprecated, " + "as it leads to incorrect behaviour" + ), + ): + typing.evaluate_forward_ref(ref) + + # No warnings when `type_params` is passed: + with warnings.catch_warnings(record=True) as w: + typing.evaluate_forward_ref(ref, type_params=()) + self.assertEqual(w, []) + + class CollectionsAbcTests(BaseTestCase): def test_hashable(self): @@ -8912,13 +8970,13 @@ class Child1(Base1): self.assertEqual(Child1.__mutable_keys__, frozenset({'b'})) class Base2(TypedDict): - a: ReadOnly[int] + a: int class Child2(Base2): - b: str + b: ReadOnly[str] - self.assertEqual(Child1.__readonly_keys__, frozenset({'a'})) - self.assertEqual(Child1.__mutable_keys__, frozenset({'b'})) + self.assertEqual(Child2.__readonly_keys__, frozenset({'b'})) + self.assertEqual(Child2.__mutable_keys__, frozenset({'a'})) def test_cannot_make_mutable_key_readonly(self): class Base(TypedDict): @@ -10129,6 +10187,18 @@ def test_valid_uses(self): self.assertEqual(C4.__args__, (Concatenate[int, T, P], T)) self.assertEqual(C4.__parameters__, (T, P)) + def test_invalid_uses(self): + with self.assertRaisesRegex(TypeError, 'Concatenate of no types'): + Concatenate[()] + with self.assertRaisesRegex( + TypeError, + ( + 'The last parameter to Concatenate should be a ' + 'ParamSpec variable or ellipsis' + ), + ): + Concatenate[int] + def test_var_substitution(self): T = TypeVar('T') P = ParamSpec('P') @@ -10403,7 +10473,7 @@ def test_special_attrs2(self): # to the variable name to which it is assigned". Thus, providing # __qualname__ is unnecessary. self.assertEqual(SpecialAttrsT.__name__, 'SpecialAttrsT') - self.assertFalse(hasattr(SpecialAttrsT, '__qualname__')) + self.assertNotHasAttr(SpecialAttrsT, '__qualname__') self.assertEqual(SpecialAttrsT.__module__, __name__) # Module-level type variables are picklable. 
for proto in range(pickle.HIGHEST_PROTOCOL + 1): @@ -10412,7 +10482,7 @@ def test_special_attrs2(self): self.assertIs(SpecialAttrsT, loaded) self.assertEqual(SpecialAttrsP.__name__, 'SpecialAttrsP') - self.assertFalse(hasattr(SpecialAttrsP, '__qualname__')) + self.assertNotHasAttr(SpecialAttrsP, '__qualname__') self.assertEqual(SpecialAttrsP.__module__, __name__) # Module-level ParamSpecs are picklable. for proto in range(pickle.HIGHEST_PROTOCOL + 1): diff --git a/Lib/test/test_unicodedata.py b/Lib/test/test_unicodedata.py index c7d09a6b460c19..0285f0d51f2365 100644 --- a/Lib/test/test_unicodedata.py +++ b/Lib/test/test_unicodedata.py @@ -11,8 +11,14 @@ import sys import unicodedata import unittest -from test.support import (open_urlresource, requires_resource, script_helper, - cpython_only, check_disallow_instantiation) +from test.support import ( + open_urlresource, + requires_resource, + script_helper, + cpython_only, + check_disallow_instantiation, + force_not_colorized, +) class UnicodeMethodsTest(unittest.TestCase): @@ -277,6 +283,7 @@ def test_disallow_instantiation(self): # Ensure that the type disallows instantiation (bpo-43916) check_disallow_instantiation(self, unicodedata.UCD) + @force_not_colorized def test_failed_import_during_compiling(self): # Issue 4367 # Decoding \N escapes requires the unicodedata module. If it can't be diff --git a/Lib/test/test_unittest/test_case.py b/Lib/test/test_unittest/test_case.py index b4b2194a09cf9f..a04af55f3fc0ae 100644 --- a/Lib/test/test_unittest/test_case.py +++ b/Lib/test/test_unittest/test_case.py @@ -10,6 +10,7 @@ import inspect import types +from collections import UserString from copy import deepcopy from test import support @@ -54,6 +55,10 @@ def tearDown(self): self.events.append('tearDown') +class List(list): + pass + + class Test_TestCase(unittest.TestCase, TestEquality, TestHashing): ### Set up attributes used by inherited tests @@ -85,7 +90,7 @@ class Test(unittest.TestCase): def runTest(self): raise MyException() def test(self): pass - self.assertEqual(Test().id()[-13:], '.Test.runTest') + self.assertEndsWith(Test().id(), '.Test.runTest') # test that TestCase can be instantiated with no args # primarily for use at the interactive interpreter @@ -106,7 +111,7 @@ class Test(unittest.TestCase): def runTest(self): raise MyException() def test(self): pass - self.assertEqual(Test('test').id()[-10:], '.Test.test') + self.assertEndsWith(Test('test').id(), '.Test.test') # "class TestCase([methodName])" # ... 
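Note: the test_case.py hunks below exercise assertion helpers that are new to unittest in this development cycle (assertHasAttr/assertNotHasAttr, assertIsSubclass/assertNotIsSubclass, and assertStartsWith/assertEndsWith plus their Not* variants). As a rough, minimal sketch of how a test might use them, assuming an interpreter that already ships these methods (the class and method names here are made up for illustration and are not part of the change itself):

    import unittest

    class NewAssertionHelpersExample(unittest.TestCase):
        def test_new_helpers(self):
            # attribute checks replace assertTrue(hasattr(...)) / assertFalse(hasattr(...))
            self.assertHasAttr([], 'append')
            self.assertNotHasAttr([], 'update')
            # subclass checks replace assertTrue(issubclass(...))
            self.assertIsSubclass(bool, int)
            self.assertNotIsSubclass(int, str)
            # prefix/suffix checks accept a single string or a tuple of candidates
            self.assertStartsWith('ababahalamaha', 'ababa')
            self.assertEndsWith('ababahalamaha', ('x', 'amaha'))

    if __name__ == '__main__':
        unittest.main()
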
@@ -347,7 +352,10 @@ async def test1(self): return 1 with self.assertWarns(DeprecationWarning) as w: + warnings.filterwarnings('ignore', + 'coroutine .* was never awaited', RuntimeWarning) Foo('test1').run() + support.gc_collect() self.assertIn('It is deprecated to return a value that is not None', str(w.warning)) self.assertIn('test1', str(w.warning)) self.assertEqual(w.filename, __file__) @@ -697,16 +705,136 @@ def testAssertIsNot(self): self.assertRaises(self.failureException, self.assertIsNot, thing, thing) def testAssertIsInstance(self): - thing = [] + thing = List() self.assertIsInstance(thing, list) - self.assertRaises(self.failureException, self.assertIsInstance, - thing, dict) + self.assertIsInstance(thing, (int, list)) + with self.assertRaises(self.failureException) as cm: + self.assertIsInstance(thing, int) + self.assertEqual(str(cm.exception), + "[] is not an instance of <class 'int'>") + with self.assertRaises(self.failureException) as cm: + self.assertIsInstance(thing, (int, float)) + self.assertEqual(str(cm.exception), + "[] is not an instance of any of (<class 'int'>, <class 'float'>)") + + with self.assertRaises(self.failureException) as cm: + self.assertIsInstance(thing, int, 'ababahalamaha') + self.assertIn('ababahalamaha', str(cm.exception)) + with self.assertRaises(self.failureException) as cm: + self.assertIsInstance(thing, int, msg='ababahalamaha') + self.assertIn('ababahalamaha', str(cm.exception)) def testAssertNotIsInstance(self): - thing = [] - self.assertNotIsInstance(thing, dict) - self.assertRaises(self.failureException, self.assertNotIsInstance, - thing, list) + thing = List() + self.assertNotIsInstance(thing, int) + self.assertNotIsInstance(thing, (int, float)) + with self.assertRaises(self.failureException) as cm: + self.assertNotIsInstance(thing, list) + self.assertEqual(str(cm.exception), + "[] is an instance of <class 'list'>") + with self.assertRaises(self.failureException) as cm: + self.assertNotIsInstance(thing, (int, list)) + self.assertEqual(str(cm.exception), + "[] is an instance of <class 'list'>") + + with self.assertRaises(self.failureException) as cm: + self.assertNotIsInstance(thing, list, 'ababahalamaha') + self.assertIn('ababahalamaha', str(cm.exception)) + with self.assertRaises(self.failureException) as cm: + self.assertNotIsInstance(thing, list, msg='ababahalamaha') + self.assertIn('ababahalamaha', str(cm.exception)) + + def testAssertIsSubclass(self): + self.assertIsSubclass(List, list) + self.assertIsSubclass(List, (int, list)) + with self.assertRaises(self.failureException) as cm: + self.assertIsSubclass(List, int) + self.assertEqual(str(cm.exception), + f"{List!r} is not a subclass of <class 'int'>") + with self.assertRaises(self.failureException) as cm: + self.assertIsSubclass(List, (int, float)) + self.assertEqual(str(cm.exception), + f"{List!r} is not a subclass of any of (<class 'int'>, <class 'float'>)") + with self.assertRaises(self.failureException) as cm: + self.assertIsSubclass(1, int) + self.assertEqual(str(cm.exception), "1 is not a class") + + with self.assertRaises(self.failureException) as cm: + self.assertIsSubclass(List, int, 'ababahalamaha') + self.assertIn('ababahalamaha', str(cm.exception)) + with self.assertRaises(self.failureException) as cm: + self.assertIsSubclass(List, int, msg='ababahalamaha') + self.assertIn('ababahalamaha', str(cm.exception)) + + def testAssertNotIsSubclass(self): + self.assertNotIsSubclass(List, int) + self.assertNotIsSubclass(List, (int, float)) + with self.assertRaises(self.failureException) as cm: + self.assertNotIsSubclass(List, list) + self.assertEqual(str(cm.exception), + f"{List!r} is a subclass of <class 'list'>")
+ with self.assertRaises(self.failureException) as cm: + self.assertNotIsSubclass(List, (int, list)) + self.assertEqual(str(cm.exception), + f"{List!r} is a subclass of <class 'list'>") + with self.assertRaises(self.failureException) as cm: + self.assertNotIsSubclass(1, int) + self.assertEqual(str(cm.exception), "1 is not a class") + + with self.assertRaises(self.failureException) as cm: + self.assertNotIsSubclass(List, list, 'ababahalamaha') + self.assertIn('ababahalamaha', str(cm.exception)) + with self.assertRaises(self.failureException) as cm: + self.assertNotIsSubclass(List, list, msg='ababahalamaha') + self.assertIn('ababahalamaha', str(cm.exception)) + + def testAssertHasAttr(self): + a = List() + a.x = 1 + self.assertHasAttr(a, 'x') + with self.assertRaises(self.failureException) as cm: + self.assertHasAttr(a, 'y') + self.assertEqual(str(cm.exception), + "'List' object has no attribute 'y'") + with self.assertRaises(self.failureException) as cm: + self.assertHasAttr(List, 'spam') + self.assertEqual(str(cm.exception), + "type object 'List' has no attribute 'spam'") + with self.assertRaises(self.failureException) as cm: + self.assertHasAttr(sys, 'nonexistent') + self.assertEqual(str(cm.exception), + "module 'sys' has no attribute 'nonexistent'") + + with self.assertRaises(self.failureException) as cm: + self.assertHasAttr(a, 'y', 'ababahalamaha') + self.assertIn('ababahalamaha', str(cm.exception)) + with self.assertRaises(self.failureException) as cm: + self.assertHasAttr(a, 'y', msg='ababahalamaha') + self.assertIn('ababahalamaha', str(cm.exception)) + + def testAssertNotHasAttr(self): + a = List() + a.x = 1 + self.assertNotHasAttr(a, 'y') + with self.assertRaises(self.failureException) as cm: + self.assertNotHasAttr(a, 'x') + self.assertEqual(str(cm.exception), + "'List' object has unexpected attribute 'x'") + with self.assertRaises(self.failureException) as cm: + self.assertNotHasAttr(List, 'append') + self.assertEqual(str(cm.exception), + "type object 'List' has unexpected attribute 'append'") + with self.assertRaises(self.failureException) as cm: + self.assertNotHasAttr(sys, 'modules') + self.assertEqual(str(cm.exception), + "module 'sys' has unexpected attribute 'modules'") + + with self.assertRaises(self.failureException) as cm: + self.assertNotHasAttr(a, 'x', 'ababahalamaha') + self.assertIn('ababahalamaha', str(cm.exception)) + with self.assertRaises(self.failureException) as cm: + self.assertNotHasAttr(a, 'x', msg='ababahalamaha') + self.assertIn('ababahalamaha', str(cm.exception)) def testAssertIn(self): animals = {'monkey': 'banana', 'cow': 'grass', 'seal': 'fish'} @@ -1861,6 +1989,186 @@ def testAssertNoLogsYieldsNone(self): pass self.assertIsNone(value) + def testAssertStartswith(self): + self.assertStartsWith('ababahalamaha', 'ababa') + self.assertStartsWith('ababahalamaha', ('x', 'ababa', 'y')) + self.assertStartsWith(UserString('ababahalamaha'), 'ababa') + self.assertStartsWith(UserString('ababahalamaha'), ('x', 'ababa', 'y')) + self.assertStartsWith(bytearray(b'ababahalamaha'), b'ababa') + self.assertStartsWith(bytearray(b'ababahalamaha'), (b'x', b'ababa', b'y')) + self.assertStartsWith(b'ababahalamaha', bytearray(b'ababa')) + self.assertStartsWith(b'ababahalamaha', + (bytearray(b'x'), bytearray(b'ababa'), bytearray(b'y'))) + + with self.assertRaises(self.failureException) as cm: + self.assertStartsWith('ababahalamaha', 'amaha') + self.assertEqual(str(cm.exception), + "'ababahalamaha' doesn't start with 'amaha'") + with self.assertRaises(self.failureException) as 
cm: + self.assertStartsWith('ababahalamaha', ('x', 'y')) + self.assertEqual(str(cm.exception), + "'ababahalamaha' doesn't start with any of ('x', 'y')") + + with self.assertRaises(self.failureException) as cm: + self.assertStartsWith(b'ababahalamaha', 'ababa') + self.assertEqual(str(cm.exception), 'Expected str, not bytes') + with self.assertRaises(self.failureException) as cm: + self.assertStartsWith(b'ababahalamaha', ('amaha', 'ababa')) + self.assertEqual(str(cm.exception), 'Expected str, not bytes') + with self.assertRaises(self.failureException) as cm: + self.assertStartsWith([], 'ababa') + self.assertEqual(str(cm.exception), 'Expected str, not list') + with self.assertRaises(self.failureException) as cm: + self.assertStartsWith('ababahalamaha', b'ababa') + self.assertEqual(str(cm.exception), 'Expected bytes, not str') + with self.assertRaises(self.failureException) as cm: + self.assertStartsWith('ababahalamaha', (b'amaha', b'ababa')) + self.assertEqual(str(cm.exception), 'Expected bytes, not str') + with self.assertRaises(TypeError): + self.assertStartsWith('ababahalamaha', ord('a')) + + with self.assertRaises(self.failureException) as cm: + self.assertStartsWith('ababahalamaha', 'amaha', 'abracadabra') + self.assertIn('ababahalamaha', str(cm.exception)) + with self.assertRaises(self.failureException) as cm: + self.assertStartsWith('ababahalamaha', 'amaha', msg='abracadabra') + self.assertIn('ababahalamaha', str(cm.exception)) + + def testAssertNotStartswith(self): + self.assertNotStartsWith('ababahalamaha', 'amaha') + self.assertNotStartsWith('ababahalamaha', ('x', 'amaha', 'y')) + self.assertNotStartsWith(UserString('ababahalamaha'), 'amaha') + self.assertNotStartsWith(UserString('ababahalamaha'), ('x', 'amaha', 'y')) + self.assertNotStartsWith(bytearray(b'ababahalamaha'), b'amaha') + self.assertNotStartsWith(bytearray(b'ababahalamaha'), (b'x', b'amaha', b'y')) + self.assertNotStartsWith(b'ababahalamaha', bytearray(b'amaha')) + self.assertNotStartsWith(b'ababahalamaha', + (bytearray(b'x'), bytearray(b'amaha'), bytearray(b'y'))) + + with self.assertRaises(self.failureException) as cm: + self.assertNotStartsWith('ababahalamaha', 'ababa') + self.assertEqual(str(cm.exception), + "'ababahalamaha' starts with 'ababa'") + with self.assertRaises(self.failureException) as cm: + self.assertNotStartsWith('ababahalamaha', ('x', 'ababa', 'y')) + self.assertEqual(str(cm.exception), + "'ababahalamaha' starts with 'ababa'") + + with self.assertRaises(self.failureException) as cm: + self.assertNotStartsWith(b'ababahalamaha', 'ababa') + self.assertEqual(str(cm.exception), 'Expected str, not bytes') + with self.assertRaises(self.failureException) as cm: + self.assertNotStartsWith(b'ababahalamaha', ('amaha', 'ababa')) + self.assertEqual(str(cm.exception), 'Expected str, not bytes') + with self.assertRaises(self.failureException) as cm: + self.assertNotStartsWith([], 'ababa') + self.assertEqual(str(cm.exception), 'Expected str, not list') + with self.assertRaises(self.failureException) as cm: + self.assertNotStartsWith('ababahalamaha', b'ababa') + self.assertEqual(str(cm.exception), 'Expected bytes, not str') + with self.assertRaises(self.failureException) as cm: + self.assertNotStartsWith('ababahalamaha', (b'amaha', b'ababa')) + self.assertEqual(str(cm.exception), 'Expected bytes, not str') + with self.assertRaises(TypeError): + self.assertNotStartsWith('ababahalamaha', ord('a')) + + with self.assertRaises(self.failureException) as cm: + self.assertNotStartsWith('ababahalamaha', 'ababa', 'abracadabra') 
+ self.assertIn('ababahalamaha', str(cm.exception)) + with self.assertRaises(self.failureException) as cm: + self.assertNotStartsWith('ababahalamaha', 'ababa', msg='abracadabra') + self.assertIn('ababahalamaha', str(cm.exception)) + + def testAssertEndswith(self): + self.assertEndsWith('ababahalamaha', 'amaha') + self.assertEndsWith('ababahalamaha', ('x', 'amaha', 'y')) + self.assertEndsWith(UserString('ababahalamaha'), 'amaha') + self.assertEndsWith(UserString('ababahalamaha'), ('x', 'amaha', 'y')) + self.assertEndsWith(bytearray(b'ababahalamaha'), b'amaha') + self.assertEndsWith(bytearray(b'ababahalamaha'), (b'x', b'amaha', b'y')) + self.assertEndsWith(b'ababahalamaha', bytearray(b'amaha')) + self.assertEndsWith(b'ababahalamaha', + (bytearray(b'x'), bytearray(b'amaha'), bytearray(b'y'))) + + with self.assertRaises(self.failureException) as cm: + self.assertEndsWith('ababahalamaha', 'ababa') + self.assertEqual(str(cm.exception), + "'ababahalamaha' doesn't end with 'ababa'") + with self.assertRaises(self.failureException) as cm: + self.assertEndsWith('ababahalamaha', ('x', 'y')) + self.assertEqual(str(cm.exception), + "'ababahalamaha' doesn't end with any of ('x', 'y')") + + with self.assertRaises(self.failureException) as cm: + self.assertEndsWith(b'ababahalamaha', 'amaha') + self.assertEqual(str(cm.exception), 'Expected str, not bytes') + with self.assertRaises(self.failureException) as cm: + self.assertEndsWith(b'ababahalamaha', ('ababa', 'amaha')) + self.assertEqual(str(cm.exception), 'Expected str, not bytes') + with self.assertRaises(self.failureException) as cm: + self.assertEndsWith([], 'amaha') + self.assertEqual(str(cm.exception), 'Expected str, not list') + with self.assertRaises(self.failureException) as cm: + self.assertEndsWith('ababahalamaha', b'amaha') + self.assertEqual(str(cm.exception), 'Expected bytes, not str') + with self.assertRaises(self.failureException) as cm: + self.assertEndsWith('ababahalamaha', (b'ababa', b'amaha')) + self.assertEqual(str(cm.exception), 'Expected bytes, not str') + with self.assertRaises(TypeError): + self.assertEndsWith('ababahalamaha', ord('a')) + + with self.assertRaises(self.failureException) as cm: + self.assertEndsWith('ababahalamaha', 'ababa', 'abracadabra') + self.assertIn('ababahalamaha', str(cm.exception)) + with self.assertRaises(self.failureException) as cm: + self.assertEndsWith('ababahalamaha', 'ababa', msg='abracadabra') + self.assertIn('ababahalamaha', str(cm.exception)) + + def testAssertNotEndswith(self): + self.assertNotEndsWith('ababahalamaha', 'ababa') + self.assertNotEndsWith('ababahalamaha', ('x', 'ababa', 'y')) + self.assertNotEndsWith(UserString('ababahalamaha'), 'ababa') + self.assertNotEndsWith(UserString('ababahalamaha'), ('x', 'ababa', 'y')) + self.assertNotEndsWith(bytearray(b'ababahalamaha'), b'ababa') + self.assertNotEndsWith(bytearray(b'ababahalamaha'), (b'x', b'ababa', b'y')) + self.assertNotEndsWith(b'ababahalamaha', bytearray(b'ababa')) + self.assertNotEndsWith(b'ababahalamaha', + (bytearray(b'x'), bytearray(b'ababa'), bytearray(b'y'))) + + with self.assertRaises(self.failureException) as cm: + self.assertNotEndsWith('ababahalamaha', 'amaha') + self.assertEqual(str(cm.exception), + "'ababahalamaha' ends with 'amaha'") + with self.assertRaises(self.failureException) as cm: + self.assertNotEndsWith('ababahalamaha', ('x', 'amaha', 'y')) + self.assertEqual(str(cm.exception), + "'ababahalamaha' ends with 'amaha'") + + with self.assertRaises(self.failureException) as cm: + self.assertNotEndsWith(b'ababahalamaha', 
'amaha') + self.assertEqual(str(cm.exception), 'Expected str, not bytes') + with self.assertRaises(self.failureException) as cm: + self.assertNotEndsWith(b'ababahalamaha', ('ababa', 'amaha')) + self.assertEqual(str(cm.exception), 'Expected str, not bytes') + with self.assertRaises(self.failureException) as cm: + self.assertNotEndsWith([], 'amaha') + self.assertEqual(str(cm.exception), 'Expected str, not list') + with self.assertRaises(self.failureException) as cm: + self.assertNotEndsWith('ababahalamaha', b'amaha') + self.assertEqual(str(cm.exception), 'Expected bytes, not str') + with self.assertRaises(self.failureException) as cm: + self.assertNotEndsWith('ababahalamaha', (b'ababa', b'amaha')) + self.assertEqual(str(cm.exception), 'Expected bytes, not str') + with self.assertRaises(TypeError): + self.assertNotEndsWith('ababahalamaha', ord('a')) + + with self.assertRaises(self.failureException) as cm: + self.assertNotEndsWith('ababahalamaha', 'amaha', 'abracadabra') + self.assertIn('ababahalamaha', str(cm.exception)) + with self.assertRaises(self.failureException) as cm: + self.assertNotEndsWith('ababahalamaha', 'amaha', msg='abracadabra') + self.assertIn('ababahalamaha', str(cm.exception)) + def testDeprecatedFailMethods(self): """Test that the deprecated fail* methods get removed in 3.12""" deprecated_names = [ diff --git a/Lib/test/test_unittest/test_loader.py b/Lib/test/test_unittest/test_loader.py index 83dd25ca54623f..cdff6d1a20c8df 100644 --- a/Lib/test/test_unittest/test_loader.py +++ b/Lib/test/test_unittest/test_loader.py @@ -76,7 +76,7 @@ def runTest(self): loader = unittest.TestLoader() # This has to be false for the test to succeed - self.assertFalse('runTest'.startswith(loader.testMethodPrefix)) + self.assertNotStartsWith('runTest', loader.testMethodPrefix) suite = loader.loadTestsFromTestCase(Foo) self.assertIsInstance(suite, loader.suiteClass) diff --git a/Lib/test/test_unittest/test_program.py b/Lib/test/test_unittest/test_program.py index 0b46f338ac77e1..6092ed292d8f60 100644 --- a/Lib/test/test_unittest/test_program.py +++ b/Lib/test/test_unittest/test_program.py @@ -4,10 +4,10 @@ from test import support import unittest import test.test_unittest -from test.support import force_not_colorized from test.test_unittest.test_result import BufferedWriter +@support.force_not_colorized_test_class class Test_TestProgram(unittest.TestCase): def test_discovery_from_dotted_path(self): @@ -121,23 +121,21 @@ def run(self, test): self.assertEqual(['test.test_unittest', 'test.test_unittest2'], program.testNames) - @force_not_colorized def test_NonExit(self): stream = BufferedWriter() program = unittest.main(exit=False, argv=["foobar"], testRunner=unittest.TextTestRunner(stream=stream), testLoader=self.TestLoader(self.FooBar)) - self.assertTrue(hasattr(program, 'result')) + self.assertHasAttr(program, 'result') out = stream.getvalue() self.assertIn('\nFAIL: testFail ', out) self.assertIn('\nERROR: testError ', out) self.assertIn('\nUNEXPECTED SUCCESS: testUnexpectedSuccess ', out) expected = ('\n\nFAILED (failures=1, errors=1, skipped=1, ' 'expected failures=1, unexpected successes=1)\n') - self.assertTrue(out.endswith(expected)) + self.assertEndsWith(out, expected) - @force_not_colorized def test_Exit(self): stream = BufferedWriter() with self.assertRaises(SystemExit) as cm: @@ -153,9 +151,8 @@ def test_Exit(self): self.assertIn('\nUNEXPECTED SUCCESS: testUnexpectedSuccess ', out) expected = ('\n\nFAILED (failures=1, errors=1, skipped=1, ' 'expected failures=1, unexpected 
successes=1)\n') - self.assertTrue(out.endswith(expected)) + self.assertEndsWith(out, expected) - @force_not_colorized def test_ExitAsDefault(self): stream = BufferedWriter() with self.assertRaises(SystemExit): @@ -169,9 +166,8 @@ def test_ExitAsDefault(self): self.assertIn('\nUNEXPECTED SUCCESS: testUnexpectedSuccess ', out) expected = ('\n\nFAILED (failures=1, errors=1, skipped=1, ' 'expected failures=1, unexpected successes=1)\n') - self.assertTrue(out.endswith(expected)) + self.assertEndsWith(out, expected) - @force_not_colorized def test_ExitSkippedSuite(self): stream = BufferedWriter() with self.assertRaises(SystemExit) as cm: @@ -182,9 +178,8 @@ def test_ExitSkippedSuite(self): self.assertEqual(cm.exception.code, 0) out = stream.getvalue() expected = '\n\nOK (skipped=1)\n' - self.assertTrue(out.endswith(expected)) + self.assertEndsWith(out, expected) - @force_not_colorized def test_ExitEmptySuite(self): stream = BufferedWriter() with self.assertRaises(SystemExit) as cm: diff --git a/Lib/test/test_unittest/test_result.py b/Lib/test/test_unittest/test_result.py index 746b9fa2677717..9ac4c52449c2ff 100644 --- a/Lib/test/test_unittest/test_result.py +++ b/Lib/test/test_unittest/test_result.py @@ -1,13 +1,11 @@ import io import sys import textwrap - -from test.support import warnings_helper, captured_stdout - import traceback import unittest from unittest.util import strclass -from test.support import force_not_colorized +from test.support import warnings_helper +from test.support import captured_stdout, force_not_colorized_test_class from test.test_unittest.support import BufferedWriter @@ -35,6 +33,7 @@ def bad_cleanup2(): raise ValueError('bad cleanup2') +@force_not_colorized_test_class class Test_TestResult(unittest.TestCase): # Note: there are not separate tests for TestResult.wasSuccessful(), # TestResult.errors, TestResult.failures, TestResult.testsRun or @@ -206,7 +205,6 @@ def test_1(self): self.assertIs(test_case, test) self.assertIsInstance(formatted_exc, str) - @force_not_colorized def test_addFailure_filter_traceback_frames(self): class Foo(unittest.TestCase): def test_1(self): @@ -233,7 +231,6 @@ def get_exc_info(): self.assertEqual(len(dropped), 1) self.assertIn("raise self.failureException(msg)", dropped[0]) - @force_not_colorized def test_addFailure_filter_traceback_frames_context(self): class Foo(unittest.TestCase): def test_1(self): @@ -263,7 +260,6 @@ def get_exc_info(): self.assertEqual(len(dropped), 1) self.assertIn("raise self.failureException(msg)", dropped[0]) - @force_not_colorized def test_addFailure_filter_traceback_frames_chained_exception_self_loop(self): class Foo(unittest.TestCase): def test_1(self): @@ -289,7 +285,6 @@ def get_exc_info(): formatted_exc = result.failures[0][1] self.assertEqual(formatted_exc.count("Exception: Loop\n"), 1) - @force_not_colorized def test_addFailure_filter_traceback_frames_chained_exception_cycle(self): class Foo(unittest.TestCase): def test_1(self): @@ -451,7 +446,6 @@ def testFailFast(self): result.addUnexpectedSuccess(None) self.assertTrue(result.shouldStop) - @force_not_colorized def testFailFastSetByRunner(self): stream = BufferedWriter() runner = unittest.TextTestRunner(stream=stream, failfast=True) @@ -460,9 +454,10 @@ def test(result): self.assertTrue(result.failfast) result = runner.run(test) stream.flush() - self.assertTrue(stream.getvalue().endswith('\n\nOK\n')) + self.assertEndsWith(stream.getvalue(), '\n\nOK\n') +@force_not_colorized_test_class class Test_TextTestResult(unittest.TestCase): maxDiff = None @@ 
-625,7 +620,6 @@ def _run_test(self, test_name, verbosity, tearDownError=None): test.run(result) return stream.getvalue() - @force_not_colorized def testDotsOutput(self): self.assertEqual(self._run_test('testSuccess', 1), '.') self.assertEqual(self._run_test('testSkip', 1), 's') @@ -634,7 +628,6 @@ def testDotsOutput(self): self.assertEqual(self._run_test('testExpectedFailure', 1), 'x') self.assertEqual(self._run_test('testUnexpectedSuccess', 1), 'u') - @force_not_colorized def testLongOutput(self): classname = f'{__name__}.{self.Test.__qualname__}' self.assertEqual(self._run_test('testSuccess', 2), @@ -650,21 +643,17 @@ def testLongOutput(self): self.assertEqual(self._run_test('testUnexpectedSuccess', 2), f'testUnexpectedSuccess ({classname}.testUnexpectedSuccess) ... unexpected success\n') - @force_not_colorized def testDotsOutputSubTestSuccess(self): self.assertEqual(self._run_test('testSubTestSuccess', 1), '.') - @force_not_colorized def testLongOutputSubTestSuccess(self): classname = f'{__name__}.{self.Test.__qualname__}' self.assertEqual(self._run_test('testSubTestSuccess', 2), f'testSubTestSuccess ({classname}.testSubTestSuccess) ... ok\n') - @force_not_colorized def testDotsOutputSubTestMixed(self): self.assertEqual(self._run_test('testSubTestMixed', 1), 'sFE') - @force_not_colorized def testLongOutputSubTestMixed(self): classname = f'{__name__}.{self.Test.__qualname__}' self.assertEqual(self._run_test('testSubTestMixed', 2), @@ -673,7 +662,6 @@ def testLongOutputSubTestMixed(self): f' testSubTestMixed ({classname}.testSubTestMixed) [fail] (c=3) ... FAIL\n' f' testSubTestMixed ({classname}.testSubTestMixed) [error] (d=4) ... ERROR\n') - @force_not_colorized def testDotsOutputTearDownFail(self): out = self._run_test('testSuccess', 1, AssertionError('fail')) self.assertEqual(out, 'F') @@ -684,7 +672,6 @@ def testDotsOutputTearDownFail(self): out = self._run_test('testSkip', 1, AssertionError('fail')) self.assertEqual(out, 'sF') - @force_not_colorized def testLongOutputTearDownFail(self): classname = f'{__name__}.{self.Test.__qualname__}' out = self._run_test('testSuccess', 2, AssertionError('fail')) @@ -772,6 +759,7 @@ def testFoo(self): runner.run(Test('testFoo')) +@force_not_colorized_test_class class TestOutputBuffering(unittest.TestCase): def setUp(self): diff --git a/Lib/test/test_unittest/test_runner.py b/Lib/test/test_unittest/test_runner.py index 1131cd73128866..4d3cfd60b8d9c3 100644 --- a/Lib/test/test_unittest/test_runner.py +++ b/Lib/test/test_unittest/test_runner.py @@ -4,7 +4,6 @@ import pickle import subprocess from test import support -from test.support import force_not_colorized import unittest from unittest.case import _Outcome @@ -107,7 +106,7 @@ def cleanup2(*args, **kwargs): self.assertTrue(test.doCleanups()) self.assertEqual(cleanups, [(2, (), {}), (1, (1, 2, 3), dict(four='hello', five='goodbye'))]) - @force_not_colorized + @support.force_not_colorized def testCleanUpWithErrors(self): class TestableTest(unittest.TestCase): def testNothing(self): @@ -251,6 +250,7 @@ def testNothing(self): self.assertEqual(test._cleanups, []) +@support.force_not_colorized_test_class class TestClassCleanup(unittest.TestCase): def test_addClassCleanUp(self): class TestableTest(unittest.TestCase): @@ -418,7 +418,6 @@ def cleanup2(): self.assertIsInstance(e2[1], CustomError) self.assertEqual(str(e2[1]), 'cleanup1') - @force_not_colorized def test_with_errors_addCleanUp(self): ordering = [] class TestableTest(unittest.TestCase): @@ -442,7 +441,6 @@ def tearDownClass(cls): 
['setUpClass', 'setUp', 'cleanup_exc', 'tearDownClass', 'cleanup_good']) - @force_not_colorized def test_run_with_errors_addClassCleanUp(self): ordering = [] class TestableTest(unittest.TestCase): @@ -466,7 +464,6 @@ def tearDownClass(cls): ['setUpClass', 'setUp', 'test', 'cleanup_good', 'tearDownClass', 'cleanup_exc']) - @force_not_colorized def test_with_errors_in_addClassCleanup_and_setUps(self): ordering = [] class_blow_up = False @@ -519,7 +516,6 @@ def tearDownClass(cls): ['setUpClass', 'setUp', 'tearDownClass', 'cleanup_exc']) - @force_not_colorized def test_with_errors_in_tearDownClass(self): ordering = [] class TestableTest(unittest.TestCase): @@ -596,7 +592,6 @@ def test(self): 'inner setup', 'inner test', 'inner cleanup', 'end outer test', 'outer cleanup']) - @force_not_colorized def test_run_empty_suite_error_message(self): class EmptyTest(unittest.TestCase): pass @@ -608,6 +603,7 @@ class EmptyTest(unittest.TestCase): self.assertIn("\nNO TESTS RAN\n", runner.stream.getvalue()) +@support.force_not_colorized_test_class class TestModuleCleanUp(unittest.TestCase): def test_add_and_do_ModuleCleanup(self): module_cleanups = [] @@ -670,7 +666,6 @@ class Module(object): self.assertEqual(cleanups, [((1, 2), {'function': 'hello'})]) - @force_not_colorized def test_run_module_cleanUp(self): blowUp = True ordering = [] @@ -810,7 +805,6 @@ def tearDownClass(cls): 'tearDownClass', 'cleanup_good']) self.assertEqual(unittest.case._module_cleanups, []) - @force_not_colorized def test_run_module_cleanUp_when_teardown_exception(self): ordering = [] class Module(object): @@ -972,7 +966,6 @@ def testNothing(self): self.assertEqual(cleanups, [((1, 2), {'function': 3, 'self': 4})]) - @force_not_colorized def test_with_errors_in_addClassCleanup(self): ordering = [] @@ -1006,7 +999,6 @@ def tearDownClass(cls): ['setUpModule', 'setUpClass', 'test', 'tearDownClass', 'cleanup_exc', 'tearDownModule', 'cleanup_good']) - @force_not_colorized def test_with_errors_in_addCleanup(self): ordering = [] class Module(object): @@ -1037,7 +1029,6 @@ def tearDown(self): ['setUpModule', 'setUp', 'test', 'tearDown', 'cleanup_exc', 'tearDownModule', 'cleanup_good']) - @force_not_colorized def test_with_errors_in_addModuleCleanup_and_setUps(self): ordering = [] module_blow_up = False @@ -1330,7 +1321,7 @@ def MockResultClass(*args): expectedresult = (runner.stream, DESCRIPTIONS, VERBOSITY) self.assertEqual(runner._makeResult(), expectedresult) - @force_not_colorized + @support.force_not_colorized @support.requires_subprocess() def test_warnings(self): """ diff --git a/Lib/test/test_unittest/testmock/testasync.py b/Lib/test/test_unittest/testmock/testasync.py index afc9d1f11da1e2..0791675b5401ca 100644 --- a/Lib/test/test_unittest/testmock/testasync.py +++ b/Lib/test/test_unittest/testmock/testasync.py @@ -586,16 +586,16 @@ def test_sync_magic_methods_return_magic_mocks(self): def test_magicmock_has_async_magic_methods(self): m_mock = MagicMock() - self.assertTrue(hasattr(m_mock, "__aenter__")) - self.assertTrue(hasattr(m_mock, "__aexit__")) - self.assertTrue(hasattr(m_mock, "__anext__")) + self.assertHasAttr(m_mock, "__aenter__") + self.assertHasAttr(m_mock, "__aexit__") + self.assertHasAttr(m_mock, "__anext__") def test_asyncmock_has_sync_magic_methods(self): a_mock = AsyncMock() - self.assertTrue(hasattr(a_mock, "__enter__")) - self.assertTrue(hasattr(a_mock, "__exit__")) - self.assertTrue(hasattr(a_mock, "__next__")) - self.assertTrue(hasattr(a_mock, "__len__")) + self.assertHasAttr(a_mock, "__enter__") + 
self.assertHasAttr(a_mock, "__exit__") + self.assertHasAttr(a_mock, "__next__") + self.assertHasAttr(a_mock, "__len__") def test_magic_methods_are_async_functions(self): m_mock = MagicMock() diff --git a/Lib/test/test_unittest/testmock/testcallable.py b/Lib/test/test_unittest/testmock/testcallable.py index ca88511f63959d..03cb983e447c70 100644 --- a/Lib/test/test_unittest/testmock/testcallable.py +++ b/Lib/test/test_unittest/testmock/testcallable.py @@ -23,21 +23,21 @@ def assertNotCallable(self, mock): def test_non_callable(self): for mock in NonCallableMagicMock(), NonCallableMock(): self.assertRaises(TypeError, mock) - self.assertFalse(hasattr(mock, '__call__')) + self.assertNotHasAttr(mock, '__call__') self.assertIn(mock.__class__.__name__, repr(mock)) def test_hierarchy(self): - self.assertTrue(issubclass(MagicMock, Mock)) - self.assertTrue(issubclass(NonCallableMagicMock, NonCallableMock)) + self.assertIsSubclass(MagicMock, Mock) + self.assertIsSubclass(NonCallableMagicMock, NonCallableMock) def test_attributes(self): one = NonCallableMock() - self.assertTrue(issubclass(type(one.one), Mock)) + self.assertIsSubclass(type(one.one), Mock) two = NonCallableMagicMock() - self.assertTrue(issubclass(type(two.two), MagicMock)) + self.assertIsSubclass(type(two.two), MagicMock) def test_subclasses(self): @@ -45,13 +45,13 @@ class MockSub(Mock): pass one = MockSub() - self.assertTrue(issubclass(type(one.one), MockSub)) + self.assertIsSubclass(type(one.one), MockSub) class MagicSub(MagicMock): pass two = MagicSub() - self.assertTrue(issubclass(type(two.two), MagicSub)) + self.assertIsSubclass(type(two.two), MagicSub) def test_patch_spec(self): diff --git a/Lib/test/test_unittest/testmock/testhelpers.py b/Lib/test/test_unittest/testmock/testhelpers.py index f260769eb8c35e..8d0f3ebc5cba88 100644 --- a/Lib/test/test_unittest/testmock/testhelpers.py +++ b/Lib/test/test_unittest/testmock/testhelpers.py @@ -951,7 +951,7 @@ def __getattr__(self, attribute): proxy = Foo() autospec = create_autospec(proxy) - self.assertFalse(hasattr(autospec, '__name__')) + self.assertNotHasAttr(autospec, '__name__') def test_autospec_signature_staticmethod(self): diff --git a/Lib/test/test_unittest/testmock/testmagicmethods.py b/Lib/test/test_unittest/testmock/testmagicmethods.py index 2a8aa11b3284f6..acdbd699d18134 100644 --- a/Lib/test/test_unittest/testmock/testmagicmethods.py +++ b/Lib/test/test_unittest/testmock/testmagicmethods.py @@ -10,13 +10,13 @@ class TestMockingMagicMethods(unittest.TestCase): def test_deleting_magic_methods(self): mock = Mock() - self.assertFalse(hasattr(mock, '__getitem__')) + self.assertNotHasAttr(mock, '__getitem__') mock.__getitem__ = Mock() - self.assertTrue(hasattr(mock, '__getitem__')) + self.assertHasAttr(mock, '__getitem__') del mock.__getitem__ - self.assertFalse(hasattr(mock, '__getitem__')) + self.assertNotHasAttr(mock, '__getitem__') def test_magicmock_del(self): @@ -252,12 +252,12 @@ def test_magicmock(self): self.assertEqual(list(mock), [1, 2, 3]) getattr(mock, '__bool__').return_value = False - self.assertFalse(hasattr(mock, '__nonzero__')) + self.assertNotHasAttr(mock, '__nonzero__') self.assertFalse(bool(mock)) for entry in _magics: - self.assertTrue(hasattr(mock, entry)) - self.assertFalse(hasattr(mock, '__imaginary__')) + self.assertHasAttr(mock, entry) + self.assertNotHasAttr(mock, '__imaginary__') def test_magic_mock_equality(self): diff --git a/Lib/test/test_unittest/testmock/testmock.py b/Lib/test/test_unittest/testmock/testmock.py index 
e1b108f81e513c..5d1bf4258afacd 100644 --- a/Lib/test/test_unittest/testmock/testmock.py +++ b/Lib/test/test_unittest/testmock/testmock.py @@ -2215,13 +2215,13 @@ def test_attach_mock_patch_autospec_signature(self): def test_attribute_deletion(self): for mock in (Mock(), MagicMock(), NonCallableMagicMock(), NonCallableMock()): - self.assertTrue(hasattr(mock, 'm')) + self.assertHasAttr(mock, 'm') del mock.m - self.assertFalse(hasattr(mock, 'm')) + self.assertNotHasAttr(mock, 'm') del mock.f - self.assertFalse(hasattr(mock, 'f')) + self.assertNotHasAttr(mock, 'f') self.assertRaises(AttributeError, getattr, mock, 'f') @@ -2230,18 +2230,18 @@ def test_mock_does_not_raise_on_repeated_attribute_deletion(self): for mock in (Mock(), MagicMock(), NonCallableMagicMock(), NonCallableMock()): mock.foo = 3 - self.assertTrue(hasattr(mock, 'foo')) + self.assertHasAttr(mock, 'foo') self.assertEqual(mock.foo, 3) del mock.foo - self.assertFalse(hasattr(mock, 'foo')) + self.assertNotHasAttr(mock, 'foo') mock.foo = 4 - self.assertTrue(hasattr(mock, 'foo')) + self.assertHasAttr(mock, 'foo') self.assertEqual(mock.foo, 4) del mock.foo - self.assertFalse(hasattr(mock, 'foo')) + self.assertNotHasAttr(mock, 'foo') def test_mock_raises_when_deleting_nonexistent_attribute(self): @@ -2259,7 +2259,7 @@ def test_reset_mock_does_not_raise_on_attr_deletion(self): mock.child = True del mock.child mock.reset_mock() - self.assertFalse(hasattr(mock, 'child')) + self.assertNotHasAttr(mock, 'child') def test_class_assignable(self): diff --git a/Lib/test/test_unittest/testmock/testpatch.py b/Lib/test/test_unittest/testmock/testpatch.py index 037c021e6eafcf..7c5fc3deed2ca2 100644 --- a/Lib/test/test_unittest/testmock/testpatch.py +++ b/Lib/test/test_unittest/testmock/testpatch.py @@ -366,7 +366,7 @@ def test(): self.assertEqual(SomeClass.frooble, sentinel.Frooble) test() - self.assertFalse(hasattr(SomeClass, 'frooble')) + self.assertNotHasAttr(SomeClass, 'frooble') def test_patch_wont_create_by_default(self): @@ -383,7 +383,7 @@ def test_patchobject_wont_create_by_default(self): @patch.object(SomeClass, 'ord', sentinel.Frooble) def test(): pass test() - self.assertFalse(hasattr(SomeClass, 'ord')) + self.assertNotHasAttr(SomeClass, 'ord') def test_patch_builtins_without_create(self): @@ -1477,7 +1477,7 @@ def test_patch_multiple_create(self): finally: patcher.stop() - self.assertFalse(hasattr(Foo, 'blam')) + self.assertNotHasAttr(Foo, 'blam') def test_patch_multiple_spec_set(self): diff --git a/Lib/test/test_unparse.py b/Lib/test/test_unparse.py index 35394f29fbe49d..f6c4f1f3f6476a 100644 --- a/Lib/test/test_unparse.py +++ b/Lib/test/test_unparse.py @@ -513,11 +513,13 @@ def test_class_bases_and_keywords(self): self.check_src_roundtrip("class X(*args, **kwargs):\n pass") def test_fstrings(self): - self.check_src_roundtrip("f'-{f'*{f'+{f'.{x}.'}+'}*'}-'") - self.check_src_roundtrip("f'\\u2028{'x'}'") + self.check_src_roundtrip('''f\'\'\'-{f"""*{f"+{f'.{x}.'}+"}*"""}-\'\'\'''') + self.check_src_roundtrip('''f\'-{f\'\'\'*{f"""+{f".{f'{x}'}."}+"""}*\'\'\'}-\'''') + self.check_src_roundtrip('''f\'-{f\'*{f\'\'\'+{f""".{f"{f'{x}'}"}."""}+\'\'\'}*\'}-\'''') + self.check_src_roundtrip('''f"\\u2028{'x'}"''') self.check_src_roundtrip(r"f'{x}\n'") - self.check_src_roundtrip("f'{'\\n'}\\n'") - self.check_src_roundtrip("f'{f'{x}\\n'}\\n'") + self.check_src_roundtrip('''f"{'\\n'}\\n"''') + self.check_src_roundtrip('''f"{f'{x}\\n'}\\n"''') def test_docstrings(self): docstrings = ( @@ -651,7 +653,9 @@ def test_multiquote_joined_string(self): def 
test_backslash_in_format_spec(self): import re - msg = re.escape("invalid escape sequence '\\ '") + msg = re.escape('"\\ " is an invalid escape sequence. ' + 'Such sequences will not work in the future. ' + 'Did you mean "\\\\ "? A raw string is also an option.') with self.assertWarnsRegex(SyntaxWarning, msg): self.check_ast_roundtrip("""f"{x:\\ }" """) self.check_ast_roundtrip("""f"{x:\\n}" """) diff --git a/Lib/test/test_urllib.py b/Lib/test/test_urllib.py index 042d3b35b77022..4842428d6fd103 100644 --- a/Lib/test/test_urllib.py +++ b/Lib/test/test_urllib.py @@ -419,7 +419,9 @@ def test_read_bogus(self): Content-Type: text/html; charset=iso-8859-1 ''', mock_close=True) try: - self.assertRaises(OSError, urllib.request.urlopen, "http://python.org/") + with self.assertRaises(urllib.error.HTTPError) as cm: + urllib.request.urlopen("http://python.org/") + cm.exception.close() finally: self.unfakehttp() @@ -434,8 +436,9 @@ def test_invalid_redirect(self): ''', mock_close=True) try: msg = "Redirection to url 'file:" - with self.assertRaisesRegex(urllib.error.HTTPError, msg): + with self.assertRaisesRegex(urllib.error.HTTPError, msg) as cm: urllib.request.urlopen("http://python.org/") + cm.exception.close() finally: self.unfakehttp() @@ -448,8 +451,9 @@ def test_redirect_limit_independent(self): Connection: close ''', mock_close=True) try: - self.assertRaises(urllib.error.HTTPError, urllib.request.urlopen, - "http://something") + with self.assertRaises(urllib.error.HTTPError) as cm: + urllib.request.urlopen("http://something") + cm.exception.close() finally: self.unfakehttp() @@ -529,10 +533,11 @@ def setUp(self): "QOjdAAAAAXNSR0IArs4c6QAAAA9JREFUCNdj%0AYGBg%2BP//PwAGAQL%2BCm8 " "vHgAAAABJRU5ErkJggg%3D%3D%0A%20") - self.text_url_resp = urllib.request.urlopen(self.text_url) - self.text_url_base64_resp = urllib.request.urlopen( - self.text_url_base64) - self.image_url_resp = urllib.request.urlopen(self.image_url) + self.text_url_resp = self.enterContext( + urllib.request.urlopen(self.text_url)) + self.text_url_base64_resp = self.enterContext( + urllib.request.urlopen(self.text_url_base64)) + self.image_url_resp = self.enterContext(urllib.request.urlopen(self.image_url)) def test_interface(self): # Make sure object returned by urlopen() has the specified methods @@ -548,8 +553,10 @@ def test_info(self): [('text/plain', ''), ('charset', 'ISO-8859-1')]) self.assertEqual(self.image_url_resp.info()['content-length'], str(len(self.image))) - self.assertEqual(urllib.request.urlopen("data:,").info().get_params(), + r = urllib.request.urlopen("data:,") + self.assertEqual(r.info().get_params(), [('text/plain', ''), ('charset', 'US-ASCII')]) + r.close() def test_geturl(self): self.assertEqual(self.text_url_resp.geturl(), self.text_url) diff --git a/Lib/test/test_urllib2.py b/Lib/test/test_urllib2.py index 085b24c25b2daa..44e6af8c6b6868 100644 --- a/Lib/test/test_urllib2.py +++ b/Lib/test/test_urllib2.py @@ -782,6 +782,7 @@ def connect_ftp(self, user, passwd, host, port, dirs, headers = r.info() self.assertEqual(headers.get("Content-type"), mimetype) self.assertEqual(int(headers["Content-length"]), len(data)) + r.close() @support.requires_resource("network") def test_ftp_error(self): @@ -1247,10 +1248,11 @@ def test_redirect(self): try: method(req, MockFile(), code, "Blah", MockHeaders({"location": to_url})) - except urllib.error.HTTPError: + except urllib.error.HTTPError as err: # 307 and 308 in response to POST require user OK self.assertIn(code, (307, 308)) self.assertIsNotNone(data) + err.close() 
self.assertEqual(o.req.get_full_url(), to_url) try: self.assertEqual(o.req.get_method(), "GET") @@ -1286,9 +1288,10 @@ def redirect(h, req, url=to_url): while 1: redirect(h, req, "http://example.com/") count = count + 1 - except urllib.error.HTTPError: + except urllib.error.HTTPError as err: # don't stop until max_repeats, because cookies may introduce state self.assertEqual(count, urllib.request.HTTPRedirectHandler.max_repeats) + err.close() # detect endless non-repeating chain of redirects req = Request(from_url, origin_req_host="example.com") @@ -1298,9 +1301,10 @@ def redirect(h, req, url=to_url): while 1: redirect(h, req, "http://example.com/%d" % count) count = count + 1 - except urllib.error.HTTPError: + except urllib.error.HTTPError as err: self.assertEqual(count, urllib.request.HTTPRedirectHandler.max_redirections) + err.close() def test_invalid_redirect(self): from_url = "http://example.com/a.html" @@ -1314,9 +1318,11 @@ def test_invalid_redirect(self): for scheme in invalid_schemes: invalid_url = scheme + '://' + schemeless_url - self.assertRaises(urllib.error.HTTPError, h.http_error_302, + with self.assertRaises(urllib.error.HTTPError) as cm: + h.http_error_302( req, MockFile(), 302, "Security Loophole", MockHeaders({"location": invalid_url})) + cm.exception.close() for scheme in valid_schemes: valid_url = scheme + '://' + schemeless_url @@ -1912,11 +1918,13 @@ def test_HTTPError_interface(self): self.assertEqual(str(err), expected_errmsg) expected_errmsg = '' % (err.code, err.msg) self.assertEqual(repr(err), expected_errmsg) + err.close() def test_gh_98778(self): x = urllib.error.HTTPError("url", 405, "METHOD NOT ALLOWED", None, None) self.assertEqual(getattr(x, "__notes__", ()), ()) self.assertIsInstance(x.fp.read(), bytes) + x.close() def test_parse_proxy(self): parse_proxy_test_cases = [ diff --git a/Lib/test/test_urllib2_localnet.py b/Lib/test/test_urllib2_localnet.py index 50c491a3cfd3d0..9cb15d61c2ad4d 100644 --- a/Lib/test/test_urllib2_localnet.py +++ b/Lib/test/test_urllib2_localnet.py @@ -316,7 +316,9 @@ def test_basic_auth_httperror(self): ah = urllib.request.HTTPBasicAuthHandler() ah.add_password(self.REALM, self.server_url, self.USER, self.INCORRECT_PASSWD) urllib.request.install_opener(urllib.request.build_opener(ah)) - self.assertRaises(urllib.error.HTTPError, urllib.request.urlopen, self.server_url) + with self.assertRaises(urllib.error.HTTPError) as cm: + urllib.request.urlopen(self.server_url) + cm.exception.close() @hashlib_helper.requires_hashdigest("md5", openssl=True) @@ -362,15 +364,15 @@ def test_proxy_with_bad_password_raises_httperror(self): self.proxy_digest_handler.add_password(self.REALM, self.URL, self.USER, self.PASSWD+"bad") self.digest_auth_handler.set_qop("auth") - self.assertRaises(urllib.error.HTTPError, - self.opener.open, - self.URL) + with self.assertRaises(urllib.error.HTTPError) as cm: + self.opener.open(self.URL) + cm.exception.close() def test_proxy_with_no_password_raises_httperror(self): self.digest_auth_handler.set_qop("auth") - self.assertRaises(urllib.error.HTTPError, - self.opener.open, - self.URL) + with self.assertRaises(urllib.error.HTTPError) as cm: + self.opener.open(self.URL) + cm.exception.close() def test_proxy_qop_auth_works(self): self.proxy_digest_handler.add_password(self.REALM, self.URL, diff --git a/Lib/test/test_urllib_response.py b/Lib/test/test_urllib_response.py index b76763f4ed824f..d949fa38bfc42f 100644 --- a/Lib/test/test_urllib_response.py +++ b/Lib/test/test_urllib_response.py @@ -48,6 +48,7 @@ def 
test_addinfo(self): info = urllib.response.addinfo(self.fp, self.test_headers) self.assertEqual(info.info(), self.test_headers) self.assertEqual(info.headers, self.test_headers) + info.close() def test_addinfourl(self): url = "http://www.python.org" @@ -60,6 +61,7 @@ def test_addinfourl(self): self.assertEqual(infourl.headers, self.test_headers) self.assertEqual(infourl.url, url) self.assertEqual(infourl.status, code) + infourl.close() def tearDown(self): self.sock.close() diff --git a/Lib/test/test_urllibnet.py b/Lib/test/test_urllibnet.py index f824dddf711761..ce4e60e3a8011d 100644 --- a/Lib/test/test_urllibnet.py +++ b/Lib/test/test_urllibnet.py @@ -106,6 +106,7 @@ def test_getcode(self): with urllib.request.urlopen(URL): pass self.assertEqual(e.exception.code, 404) + e.exception.close() @support.requires_resource('walltime') def test_bad_address(self): diff --git a/Lib/test/test_urlparse.py b/Lib/test/test_urlparse.py index 4516bdea6adb19..b51cc006b73280 100644 --- a/Lib/test/test_urlparse.py +++ b/Lib/test/test_urlparse.py @@ -1412,16 +1412,51 @@ def test_invalid_bracketed_hosts(self): self.assertRaises(ValueError, urllib.parse.urlsplit, 'Scheme://user@[0439:23af::2309::fae7:1234]/Path?Query') self.assertRaises(ValueError, urllib.parse.urlsplit, 'Scheme://user@[0439:23af:2309::fae7:1234:2342:438e:192.0.2.146]/Path?Query') self.assertRaises(ValueError, urllib.parse.urlsplit, 'Scheme://user@]v6a.ip[/Path') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://prefix.[v6a.ip]') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://[v6a.ip].suffix') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://prefix.[v6a.ip]/') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://[v6a.ip].suffix/') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://prefix.[v6a.ip]?') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://[v6a.ip].suffix?') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://prefix.[::1]') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://[::1].suffix') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://prefix.[::1]/') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://[::1].suffix/') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://prefix.[::1]?') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://[::1].suffix?') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://prefix.[::1]:a') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://[::1].suffix:a') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://prefix.[::1]:a1') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://[::1].suffix:a1') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://prefix.[::1]:1a') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://[::1].suffix:1a') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://prefix.[::1]:') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://[::1].suffix:/') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://prefix.[::1]:?') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://user@prefix.[v6a.ip]') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://user@[v6a.ip].suffix') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://[v6a.ip') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://v6a.ip]') + self.assertRaises(ValueError, urllib.parse.urlsplit, 
'scheme://]v6a.ip[') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://]v6a.ip') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://v6a.ip[') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://prefix.[v6a.ip') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://v6a.ip].suffix') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://prefix]v6a.ip[suffix') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://prefix]v6a.ip') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://v6a.ip[suffix') def test_splitting_bracketed_hosts(self): - p1 = urllib.parse.urlsplit('scheme://user@[v6a.ip]/path?query') + p1 = urllib.parse.urlsplit('scheme://user@[v6a.ip]:1234/path?query') self.assertEqual(p1.hostname, 'v6a.ip') self.assertEqual(p1.username, 'user') self.assertEqual(p1.path, '/path') + self.assertEqual(p1.port, 1234) p2 = urllib.parse.urlsplit('scheme://user@[0439:23af:2309::fae7%test]/path?query') self.assertEqual(p2.hostname, '0439:23af:2309::fae7%test') self.assertEqual(p2.username, 'user') self.assertEqual(p2.path, '/path') + self.assertIs(p2.port, None) p3 = urllib.parse.urlsplit('scheme://user@[0439:23af:2309::fae7:1234:192.0.2.146%test]/path?query') self.assertEqual(p3.hostname, '0439:23af:2309::fae7:1234:192.0.2.146%test') self.assertEqual(p3.username, 'user') diff --git a/Lib/test/test_uuid.py b/Lib/test/test_uuid.py index 8f40dd97f42fdc..8216c4dd00e35a 100755 --- a/Lib/test/test_uuid.py +++ b/Lib/test/test_uuid.py @@ -21,7 +21,7 @@ def importable(name): try: __import__(name) return True - except: + except ModuleNotFoundError: return False @@ -34,6 +34,47 @@ def get_command_stdout(command, args): class BaseTestUUID: uuid = None + def test_nil_uuid(self): + nil_uuid = self.uuid.NIL + + s = '00000000-0000-0000-0000-000000000000' + i = 0 + self.assertEqual(nil_uuid, self.uuid.UUID(s)) + self.assertEqual(nil_uuid, self.uuid.UUID(int=i)) + self.assertEqual(nil_uuid.int, i) + self.assertEqual(str(nil_uuid), s) + # The Nil UUID falls within the range of the Apollo NCS variant as per + # RFC 9562. + # See https://www.rfc-editor.org/rfc/rfc9562.html#section-5.9-4 + self.assertEqual(nil_uuid.variant, self.uuid.RESERVED_NCS) + # A version field of all zeros is "Unused" in RFC 9562, but the version + # field also only applies to the 10xx variant, i.e. the variant + # specified in RFC 9562. As such, because the Nil UUID falls under a + # different variant, its version is considered undefined. + # See https://www.rfc-editor.org/rfc/rfc9562.html#table2 + self.assertIsNone(nil_uuid.version) + + def test_max_uuid(self): + max_uuid = self.uuid.MAX + + s = 'ffffffff-ffff-ffff-ffff-ffffffffffff' + i = (1 << 128) - 1 + self.assertEqual(max_uuid, self.uuid.UUID(s)) + self.assertEqual(max_uuid, self.uuid.UUID(int=i)) + self.assertEqual(max_uuid.int, i) + self.assertEqual(str(max_uuid), s) + # The Max UUID falls within the range of the "yet-to-be defined" future + # UUID variant as per RFC 9562. + # See https://www.rfc-editor.org/rfc/rfc9562.html#section-5.10-4 + self.assertEqual(max_uuid.variant, self.uuid.RESERVED_FUTURE) + # A version field of all ones is "Reserved for future definition" in + # RFC 9562, but the version field also only applies to the 10xx + # variant, i.e. the variant specified in RFC 9562. As such, because the + # Max UUID falls under a different variant, its version is considered + # undefined. 
+ # See https://www.rfc-editor.org/rfc/rfc9562.html#table2 + self.assertIsNone(max_uuid.version) + def test_safe_uuid_enum(self): class CheckedSafeUUID(enum.Enum): safe = 0 diff --git a/Lib/test/test_venv.py b/Lib/test/test_venv.py index 0b09010c69d4ea..6e23097deaf221 100644 --- a/Lib/test/test_venv.py +++ b/Lib/test/test_venv.py @@ -111,10 +111,6 @@ def get_text_file_contents(self, *args, encoding='utf-8'): result = f.read() return result - def assertEndsWith(self, string, tail): - if not string.endswith(tail): - self.fail(f"String {string!r} does not end with {tail!r}") - class BasicTest(BaseTest): """Test venv module functionality.""" diff --git a/Lib/test/test_warnings/__init__.py b/Lib/test/test_warnings/__init__.py index 4e3c877896f295..4bd164b8a9a82b 100644 --- a/Lib/test/test_warnings/__init__.py +++ b/Lib/test/test_warnings/__init__.py @@ -1521,7 +1521,7 @@ def test_late_resource_warning(self): self.assertTrue(err.startswith(expected), ascii(err)) -class DeprecatedTests(unittest.TestCase): +class DeprecatedTests(PyPublicAPITests): def test_dunder_deprecated(self): @deprecated("A will go away soon") class A: diff --git a/Lib/test/test_xml_dom_xmlbuilder.py b/Lib/test/test_xml_dom_xmlbuilder.py new file mode 100644 index 00000000000000..5f5f2eb328df9f --- /dev/null +++ b/Lib/test/test_xml_dom_xmlbuilder.py @@ -0,0 +1,88 @@ +import io +import unittest +from http import client +from test.test_httplib import FakeSocket +from unittest import mock +from xml.dom import getDOMImplementation, minidom, xmlbuilder + +SMALL_SAMPLE = b""" + + +Introduction to XSL +
+

A. Namespace

+""" + + +class XMLBuilderTest(unittest.TestCase): + def test_entity_resolver(self): + body = ( + b"HTTP/1.1 200 OK\r\nContent-Type: text/xml; charset=utf-8\r\n\r\n" + + SMALL_SAMPLE + ) + + sock = FakeSocket(body) + response = client.HTTPResponse(sock) + response.begin() + attrs = {"open.return_value": response} + opener = mock.Mock(**attrs) + + resolver = xmlbuilder.DOMEntityResolver() + + with mock.patch("urllib.request.build_opener") as mock_build: + mock_build.return_value = opener + source = resolver.resolveEntity(None, "http://example.com/2000/svg") + + self.assertIsInstance(source, xmlbuilder.DOMInputSource) + self.assertIsNone(source.publicId) + self.assertEqual(source.systemId, "http://example.com/2000/svg") + self.assertEqual(source.baseURI, "http://example.com/2000/") + self.assertEqual(source.encoding, "utf-8") + self.assertIs(source.byteStream, response) + + self.assertIsNone(source.characterStream) + self.assertIsNone(source.stringData) + + def test_builder(self): + imp = getDOMImplementation() + self.assertIsInstance(imp, xmlbuilder.DOMImplementationLS) + + builder = imp.createDOMBuilder(imp.MODE_SYNCHRONOUS, None) + self.assertIsInstance(builder, xmlbuilder.DOMBuilder) + + def test_parse_uri(self): + body = ( + b"HTTP/1.1 200 OK\r\nContent-Type: text/xml; charset=utf-8\r\n\r\n" + + SMALL_SAMPLE + ) + + sock = FakeSocket(body) + response = client.HTTPResponse(sock) + response.begin() + attrs = {"open.return_value": response} + opener = mock.Mock(**attrs) + + with mock.patch("urllib.request.build_opener") as mock_build: + mock_build.return_value = opener + + imp = getDOMImplementation() + builder = imp.createDOMBuilder(imp.MODE_SYNCHRONOUS, None) + document = builder.parseURI("http://example.com/2000/svg") + + self.assertIsInstance(document, minidom.Document) + self.assertEqual(len(document.childNodes), 1) + + def test_parse_with_systemId(self): + response = io.BytesIO(SMALL_SAMPLE) + + with mock.patch("urllib.request.urlopen") as mock_open: + mock_open.return_value = response + + imp = getDOMImplementation() + source = imp.createDOMInputSource() + builder = imp.createDOMBuilder(imp.MODE_SYNCHRONOUS, None) + source.systemId = "http://example.com/2000/svg" + document = builder.parse(source) + + self.assertIsInstance(document, minidom.Document) + self.assertEqual(len(document.childNodes), 1) diff --git a/Lib/test/test_zipfile/test_core.py b/Lib/test/test_zipfile/test_core.py index 49f39b9337df85..6b1fe56074d561 100644 --- a/Lib/test/test_zipfile/test_core.py +++ b/Lib/test/test_zipfile/test_core.py @@ -1,3 +1,4 @@ +import _pyio import array import contextlib import importlib.util @@ -19,10 +20,11 @@ from random import randint, random, randbytes from test import archiver_tests -from test.support import script_helper +from test.support import script_helper, os_helper from test.support import ( findfile, requires_zlib, requires_bz2, requires_lzma, - captured_stdout, captured_stderr, requires_subprocess + captured_stdout, captured_stderr, requires_subprocess, + is_emscripten ) from test.support.os_helper import ( TESTFN, unlink, rmtree, temp_dir, temp_cwd, fd_count, FakePath @@ -1781,6 +1783,35 @@ def test_writestr_extended_local_header_issue1202(self): zinfo.flag_bits |= zipfile._MASK_USE_DATA_DESCRIPTOR # Include an extended local header. 
orig_zip.writestr(zinfo, data) + def test_write_with_source_date_epoch(self): + with os_helper.EnvironmentVarGuard() as env: + # Set the SOURCE_DATE_EPOCH environment variable to a specific timestamp + env['SOURCE_DATE_EPOCH'] = "1735715999" + + with zipfile.ZipFile(TESTFN, "w") as zf: + zf.writestr("test_source_date_epoch.txt", "Testing SOURCE_DATE_EPOCH") + + with zipfile.ZipFile(TESTFN, "r") as zf: + zip_info = zf.getinfo("test_source_date_epoch.txt") + get_time = time.localtime(int(os.environ['SOURCE_DATE_EPOCH']))[:6] + # Compare each element of the date_time tuple + # Allow for a 1-second difference + for z_time, g_time in zip(zip_info.date_time, get_time): + self.assertAlmostEqual(z_time, g_time, delta=1) + + def test_write_without_source_date_epoch(self): + with os_helper.EnvironmentVarGuard() as env: + del env['SOURCE_DATE_EPOCH'] + + with zipfile.ZipFile(TESTFN, "w") as zf: + zf.writestr("test_no_source_date_epoch.txt", "Testing without SOURCE_DATE_EPOCH") + + with zipfile.ZipFile(TESTFN, "r") as zf: + zip_info = zf.getinfo("test_no_source_date_epoch.txt") + current_time = time.localtime()[:6] + for z_time, c_time in zip(zip_info.date_time, current_time): + self.assertAlmostEqual(z_time, c_time, delta=1) + def test_close(self): """Check that the zipfile is closed after the 'with' block.""" with zipfile.ZipFile(TESTFN2, "w") as zipfp: @@ -3489,5 +3520,87 @@ def test_too_short(self): b"zzz", zipfile._Extra.strip(b"zzz", (self.ZIP64_EXTRA,))) +class StatIO(_pyio.BytesIO): + """Buffer which remembers the number of bytes that were read.""" + + def __init__(self): + super().__init__() + self.bytes_read = 0 + + def read(self, size=-1): + bs = super().read(size) + self.bytes_read += len(bs) + return bs + + +class StoredZipExtFileRandomReadTest(unittest.TestCase): + """Tests whether an uncompressed, unencrypted zip entry can be randomly + seek and read without reading redundant bytes.""" + def test_stored_seek_and_read(self): + + sio = StatIO() + # 20000 bytes + txt = b'0123456789' * 2000 + + # The seek length must be greater than ZipExtFile.MIN_READ_SIZE + # as `ZipExtFile._read2()` reads in blocks of this size and we + # need to seek out of the buffered data + read_buffer_size = zipfile.ZipExtFile.MIN_READ_SIZE + self.assertGreaterEqual(10002, read_buffer_size) # for forward seek test + self.assertGreaterEqual(5003, read_buffer_size) # for backward seek test + # The read length must be less than MIN_READ_SIZE, since we assume that + # only 1 block is read in the test. 
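+        # Assumption for illustration: MIN_READ_SIZE defaults to 4096 bytes, so a
+        # 100-byte read issued right after a seek touches at most one such block.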
+ read_length = 100 + self.assertGreaterEqual(read_buffer_size, read_length) # for read() calls + + with zipfile.ZipFile(sio, "w", compression=zipfile.ZIP_STORED) as zipf: + zipf.writestr("foo.txt", txt) + + # check random seek and read on a file + with zipfile.ZipFile(sio, "r") as zipf: + with zipf.open("foo.txt", "r") as fp: + # Test this optimized read hasn't rewound and read from the + # start of the file (as in the case of the unoptimized path) + + # forward seek + old_count = sio.bytes_read + forward_seek_len = 10002 + current_pos = 0 + fp.seek(forward_seek_len, os.SEEK_CUR) + current_pos += forward_seek_len + self.assertEqual(fp.tell(), current_pos) + self.assertEqual(fp._left, fp._compress_left) + arr = fp.read(read_length) + current_pos += read_length + self.assertEqual(fp.tell(), current_pos) + self.assertEqual(arr, txt[current_pos - read_length:current_pos]) + self.assertEqual(fp._left, fp._compress_left) + read_count = sio.bytes_read - old_count + self.assertLessEqual(read_count, read_buffer_size) + + # backward seek + old_count = sio.bytes_read + backward_seek_len = 5003 + fp.seek(-backward_seek_len, os.SEEK_CUR) + current_pos -= backward_seek_len + self.assertEqual(fp.tell(), current_pos) + self.assertEqual(fp._left, fp._compress_left) + arr = fp.read(read_length) + current_pos += read_length + self.assertEqual(fp.tell(), current_pos) + self.assertEqual(arr, txt[current_pos - read_length:current_pos]) + self.assertEqual(fp._left, fp._compress_left) + read_count = sio.bytes_read - old_count + self.assertLessEqual(read_count, read_buffer_size) + + # eof flags test + fp.seek(0, os.SEEK_END) + fp.seek(12345, os.SEEK_SET) + current_pos = 12345 + arr = fp.read(read_length) + current_pos += read_length + self.assertEqual(arr, txt[current_pos - read_length:current_pos]) + + if __name__ == "__main__": unittest.main() diff --git a/Lib/threading.py b/Lib/threading.py index 78e591124278fc..da9cdf0b09d83c 100644 --- a/Lib/threading.py +++ b/Lib/threading.py @@ -3,7 +3,6 @@ import os as _os import sys as _sys import _thread -import warnings from time import monotonic as _time from _weakrefset import WeakSet @@ -133,6 +132,7 @@ def RLock(*args, **kwargs): """ if args or kwargs: + import warnings warnings.warn( 'Passing arguments to RLock is deprecated and will be removed in 3.15', DeprecationWarning, @@ -694,7 +694,7 @@ def __init__(self, parties, action=None, timeout=None): """ if parties < 1: - raise ValueError("parties must be > 0") + raise ValueError("parties must be >= 1") self._cond = Condition(Lock()) self._action = action self._timeout = timeout diff --git a/Lib/tkinter/__init__.py b/Lib/tkinter/__init__.py index d494c0c9687cd1..0baed8b569e40f 100644 --- a/Lib/tkinter/__init__.py +++ b/Lib/tkinter/__init__.py @@ -2741,6 +2741,8 @@ def _setup(self, master, cnf): del cnf['name'] if not name: name = self.__class__.__name__.lower() + if name[-1].isdigit(): + name += "!" 
# Avoid duplication when calculating names below if master._last_child_ids is None: master._last_child_ids = {} count = master._last_child_ids.get(name, 0) + 1 diff --git a/Lib/tokenize.py b/Lib/tokenize.py index 1a60fd32a77ea4..9ce95a62d961ba 100644 --- a/Lib/tokenize.py +++ b/Lib/tokenize.py @@ -169,6 +169,7 @@ def __init__(self): self.prev_row = 1 self.prev_col = 0 self.prev_type = None + self.prev_line = "" self.encoding = None def add_whitespace(self, start): @@ -176,14 +177,28 @@ def add_whitespace(self, start): if row < self.prev_row or row == self.prev_row and col < self.prev_col: raise ValueError("start ({},{}) precedes previous end ({},{})" .format(row, col, self.prev_row, self.prev_col)) - row_offset = row - self.prev_row - if row_offset: - self.tokens.append("\\\n" * row_offset) - self.prev_col = 0 + self.add_backslash_continuation(start) col_offset = col - self.prev_col if col_offset: self.tokens.append(" " * col_offset) + def add_backslash_continuation(self, start): + """Add backslash continuation characters if the row has increased + without encountering a newline token. + + This also inserts the correct amount of whitespace before the backslash. + """ + row = start[0] + row_offset = row - self.prev_row + if row_offset == 0: + return + + newline = '\r\n' if self.prev_line.endswith('\r\n') else '\n' + line = self.prev_line.rstrip('\\\r\n') + ws = ''.join(_itertools.takewhile(str.isspace, reversed(line))) + self.tokens.append(ws + f"\\{newline}" * row_offset) + self.prev_col = 0 + def escape_brackets(self, token): characters = [] consume_until_next_bracket = False @@ -243,8 +258,6 @@ def untokenize(self, iterable): end_line, end_col = end extra_chars = last_line.count("{{") + last_line.count("}}") end = (end_line, end_col + extra_chars) - elif tok_type in (STRING, FSTRING_START) and self.prev_type in (STRING, FSTRING_END): - self.tokens.append(" ") self.add_whitespace(start) self.tokens.append(token) @@ -253,6 +266,7 @@ def untokenize(self, iterable): self.prev_row += 1 self.prev_col = 0 self.prev_type = tok_type + self.prev_line = line return "".join(self.tokens) def compat(self, token, iterable): diff --git a/Lib/tomllib/_parser.py b/Lib/tomllib/_parser.py index 4d208bcfb4a9a6..0e522c3a69e6fe 100644 --- a/Lib/tomllib/_parser.py +++ b/Lib/tomllib/_parser.py @@ -4,11 +4,7 @@ from __future__ import annotations -from collections.abc import Iterable -import string from types import MappingProxyType -from typing import Any, BinaryIO, NamedTuple -import warnings from ._re import ( RE_DATETIME, @@ -18,7 +14,13 @@ match_to_localtime, match_to_number, ) -from ._types import Key, ParseFloat, Pos + +TYPE_CHECKING = False +if TYPE_CHECKING: + from collections.abc import Iterable + from typing import IO, Any + + from ._types import Key, ParseFloat, Pos ASCII_CTRL = frozenset(chr(i) for i in range(32)) | frozenset(chr(127)) @@ -34,9 +36,11 @@ TOML_WS = frozenset(" \t") TOML_WS_AND_NEWLINE = TOML_WS | frozenset("\n") -BARE_KEY_CHARS = frozenset(string.ascii_letters + string.digits + "-_") +BARE_KEY_CHARS = frozenset( + "abcdefghijklmnopqrstuvwxyz" "ABCDEFGHIJKLMNOPQRSTUVWXYZ" "0123456789" "-_" +) KEY_INITIAL_CHARS = BARE_KEY_CHARS | frozenset("\"'") -HEXDIGIT_CHARS = frozenset(string.hexdigits) +HEXDIGIT_CHARS = frozenset("abcdef" "ABCDEF" "0123456789") BASIC_STR_ESCAPE_REPLACEMENTS = MappingProxyType( { @@ -80,6 +84,8 @@ def __init__( or not isinstance(doc, str) or not isinstance(pos, int) ): + import warnings + warnings.warn( "Free-form arguments for TOMLDecodeError are deprecated. 
" "Please set 'msg' (str), 'doc' (str) and 'pos' (int) arguments only.", @@ -115,7 +121,7 @@ def __init__( self.colno = colno -def load(fp: BinaryIO, /, *, parse_float: ParseFloat = float) -> dict[str, Any]: +def load(fp: IO[bytes], /, *, parse_float: ParseFloat = float) -> dict[str, Any]: """Parse TOML from a binary file object.""" b = fp.read() try: @@ -139,7 +145,7 @@ def loads(s: str, /, *, parse_float: ParseFloat = float) -> dict[str, Any]: # n f"Expected str object, not '{type(s).__qualname__}'" ) from None pos = 0 - out = Output(NestedDict(), Flags()) + out = Output() header: Key = () parse_float = make_safe_parse_float(parse_float) @@ -290,9 +296,10 @@ def append_nest_to_list(self, key: Key) -> None: cont[last_key] = [{}] -class Output(NamedTuple): - data: NestedDict - flags: Flags +class Output: + def __init__(self) -> None: + self.data = NestedDict() + self.flags = Flags() def skip_chars(src: str, pos: Pos, chars: Iterable[str]) -> Pos: diff --git a/Lib/tomllib/_re.py b/Lib/tomllib/_re.py index 9eacefc729544e..1ca6bef77a0b03 100644 --- a/Lib/tomllib/_re.py +++ b/Lib/tomllib/_re.py @@ -7,9 +7,12 @@ from datetime import date, datetime, time, timedelta, timezone, tzinfo from functools import lru_cache import re -from typing import Any -from ._types import ParseFloat +TYPE_CHECKING = False +if TYPE_CHECKING: + from typing import Any + + from ._types import ParseFloat # E.g. # - 00:32:00.999999 diff --git a/Lib/traceback.py b/Lib/traceback.py index 6367c00e4d4b86..31c73efcef5a52 100644 --- a/Lib/traceback.py +++ b/Lib/traceback.py @@ -135,7 +135,7 @@ def print_exception(exc, /, value=_sentinel, tb=_sentinel, limit=None, \ def _print_exception_bltin(exc, /): file = sys.stderr if sys.stderr is not None else sys.__stderr__ - colorize = _colorize.can_colorize() + colorize = _colorize.can_colorize(file=file) return print_exception(exc, limit=BUILTIN_EXCEPTION_LIMIT, file=file, colorize=colorize) @@ -1283,7 +1283,7 @@ def _format_syntax_error(self, stype, **kwargs): filename_suffix = ' ({})'.format(self.filename) text = self.text - if text is not None: + if isinstance(text, str): # text = " foo\n" # rtext = " foo" # ltext = "foo" @@ -1292,10 +1292,17 @@ def _format_syntax_error(self, stype, **kwargs): spaces = len(rtext) - len(ltext) if self.offset is None: yield ' {}\n'.format(ltext) - else: + elif isinstance(self.offset, int): offset = self.offset if self.lineno == self.end_lineno: - end_offset = self.end_offset if self.end_offset not in {None, 0} else offset + end_offset = ( + self.end_offset + if ( + isinstance(self.end_offset, int) + and self.end_offset != 0 + ) + else offset + ) else: end_offset = len(rtext) + 1 diff --git a/Lib/turtle.py b/Lib/turtle.py index 1320cfd93fd6db..e88981d298ad52 100644 --- a/Lib/turtle.py +++ b/Lib/turtle.py @@ -107,6 +107,7 @@ from os.path import isfile, split, join from pathlib import Path +from contextlib import contextmanager from copy import deepcopy from tkinter import simpledialog @@ -114,23 +115,24 @@ 'RawTurtle', 'Turtle', 'RawPen', 'Pen', 'Shape', 'Vec2D'] _tg_screen_functions = ['addshape', 'bgcolor', 'bgpic', 'bye', 'clearscreen', 'colormode', 'delay', 'exitonclick', 'getcanvas', - 'getshapes', 'listen', 'mainloop', 'mode', 'numinput', + 'getshapes', 'listen', 'mainloop', 'mode', 'no_animation', 'numinput', 'onkey', 'onkeypress', 'onkeyrelease', 'onscreenclick', 'ontimer', 'register_shape', 'resetscreen', 'screensize', 'save', 'setup', - 'setworldcoordinates', 'textinput', 'title', 'tracer', 'turtles', 'update', - 'window_height', 
'window_width'] + 'setworldcoordinates', 'textinput', 'title', 'tracer', 'turtles', + 'update', 'window_height', 'window_width'] _tg_turtle_functions = ['back', 'backward', 'begin_fill', 'begin_poly', 'bk', 'circle', 'clear', 'clearstamp', 'clearstamps', 'clone', 'color', 'degrees', 'distance', 'dot', 'down', 'end_fill', 'end_poly', 'fd', - 'fillcolor', 'filling', 'forward', 'get_poly', 'getpen', 'getscreen', 'get_shapepoly', - 'getturtle', 'goto', 'heading', 'hideturtle', 'home', 'ht', 'isdown', - 'isvisible', 'left', 'lt', 'onclick', 'ondrag', 'onrelease', 'pd', - 'pen', 'pencolor', 'pendown', 'pensize', 'penup', 'pos', 'position', - 'pu', 'radians', 'right', 'reset', 'resizemode', 'rt', - 'seth', 'setheading', 'setpos', 'setposition', - 'setundobuffer', 'setx', 'sety', 'shape', 'shapesize', 'shapetransform', 'shearfactor', 'showturtle', - 'speed', 'st', 'stamp', 'teleport', 'tilt', 'tiltangle', 'towards', - 'turtlesize', 'undo', 'undobufferentries', 'up', 'width', + 'fillcolor', 'fill', 'filling', 'forward', 'get_poly', 'getpen', + 'getscreen', 'get_shapepoly', 'getturtle', 'goto', 'heading', + 'hideturtle', 'home', 'ht', 'isdown', 'isvisible', 'left', 'lt', + 'onclick', 'ondrag', 'onrelease', 'pd', 'pen', 'pencolor', 'pendown', + 'pensize', 'penup', 'poly', 'pos', 'position', 'pu', 'radians', 'right', + 'reset', 'resizemode', 'rt', 'seth', 'setheading', 'setpos', + 'setposition', 'setundobuffer', 'setx', 'sety', 'shape', 'shapesize', + 'shapetransform', 'shearfactor', 'showturtle', 'speed', 'st', 'stamp', + 'teleport', 'tilt', 'tiltangle', 'towards', 'turtlesize', 'undo', + 'undobufferentries', 'up', 'width', 'write', 'xcor', 'ycor'] _tg_utilities = ['write_docstringdict', 'done'] @@ -1275,6 +1277,26 @@ def delay(self, delay=None): return self._delayvalue self._delayvalue = int(delay) + @contextmanager + def no_animation(self): + """Temporarily turn off auto-updating the screen. + + This is useful for drawing complex shapes where even the fastest setting + is too slow. Once this context manager is exited, the drawing will + be displayed. + + Example (for a TurtleScreen instance named screen + and a Turtle instance named turtle): + >>> with screen.no_animation(): + ... turtle.circle(50) + """ + tracer = self.tracer() + try: + self.tracer(0) + yield + finally: + self.tracer(tracer) + def _incrementudc(self): """Increment update counter.""" if not TurtleScreen._RUNNING: @@ -3380,6 +3402,24 @@ def filling(self): """ return isinstance(self._fillpath, list) + @contextmanager + def fill(self): + """A context manager for filling a shape. + + Implicitly ensures the code block is wrapped with + begin_fill() and end_fill(). + + Example (for a Turtle instance named turtle): + >>> turtle.color("black", "red") + >>> with turtle.fill(): + ... turtle.circle(60) + """ + self.begin_fill() + try: + yield + finally: + self.end_fill() + def begin_fill(self): """Called just before drawing a shape to be filled. @@ -3400,7 +3440,6 @@ def begin_fill(self): self.undobuffer.push(("beginfill", self._fillitem)) self._update() - def end_fill(self): """Fill the shape drawn after the call begin_fill(). @@ -3504,6 +3543,27 @@ def write(self, arg, move=False, align="left", font=("Arial", 8, "normal")): if self.undobuffer: self.undobuffer.cumulate = False + @contextmanager + def poly(self): + """A context manager for recording the vertices of a polygon. 
+ + Implicitly ensures that the code block is wrapped with + begin_poly() and end_poly() + + Example (for a Turtle instance named turtle) where we create a + triangle as the polygon and move the turtle 100 steps forward: + >>> with turtle.poly(): + ... for side in range(3) + ... turtle.forward(50) + ... turtle.right(60) + >>> turtle.forward(100) + """ + self.begin_poly() + try: + yield + finally: + self.end_poly() + def begin_poly(self): """Start recording the vertices of a polygon. diff --git a/Lib/typing.py b/Lib/typing.py index e69b485422cbd2..66570db7a5bd74 100644 --- a/Lib/typing.py +++ b/Lib/typing.py @@ -1024,7 +1024,7 @@ def evaluate_forward_ref( owner=None, globals=None, locals=None, - type_params=None, + type_params=_sentinel, format=annotationlib.Format.VALUE, _recursive_guard=frozenset(), ): diff --git a/Lib/unittest/case.py b/Lib/unittest/case.py index 55c79d353539ca..10c3b7e122371e 100644 --- a/Lib/unittest/case.py +++ b/Lib/unittest/case.py @@ -1321,13 +1321,71 @@ def assertIsInstance(self, obj, cls, msg=None): """Same as self.assertTrue(isinstance(obj, cls)), with a nicer default message.""" if not isinstance(obj, cls): - standardMsg = '%s is not an instance of %r' % (safe_repr(obj), cls) + if isinstance(cls, tuple): + standardMsg = f'{safe_repr(obj)} is not an instance of any of {cls!r}' + else: + standardMsg = f'{safe_repr(obj)} is not an instance of {cls!r}' self.fail(self._formatMessage(msg, standardMsg)) def assertNotIsInstance(self, obj, cls, msg=None): """Included for symmetry with assertIsInstance.""" if isinstance(obj, cls): - standardMsg = '%s is an instance of %r' % (safe_repr(obj), cls) + if isinstance(cls, tuple): + for x in cls: + if isinstance(obj, x): + cls = x + break + standardMsg = f'{safe_repr(obj)} is an instance of {cls!r}' + self.fail(self._formatMessage(msg, standardMsg)) + + def assertIsSubclass(self, cls, superclass, msg=None): + try: + if issubclass(cls, superclass): + return + except TypeError: + if not isinstance(cls, type): + self.fail(self._formatMessage(msg, f'{cls!r} is not a class')) + raise + if isinstance(superclass, tuple): + standardMsg = f'{cls!r} is not a subclass of any of {superclass!r}' + else: + standardMsg = f'{cls!r} is not a subclass of {superclass!r}' + self.fail(self._formatMessage(msg, standardMsg)) + + def assertNotIsSubclass(self, cls, superclass, msg=None): + try: + if not issubclass(cls, superclass): + return + except TypeError: + if not isinstance(cls, type): + self.fail(self._formatMessage(msg, f'{cls!r} is not a class')) + raise + if isinstance(superclass, tuple): + for x in superclass: + if issubclass(cls, x): + superclass = x + break + standardMsg = f'{cls!r} is a subclass of {superclass!r}' + self.fail(self._formatMessage(msg, standardMsg)) + + def assertHasAttr(self, obj, name, msg=None): + if not hasattr(obj, name): + if isinstance(obj, types.ModuleType): + standardMsg = f'module {obj.__name__!r} has no attribute {name!r}' + elif isinstance(obj, type): + standardMsg = f'type object {obj.__name__!r} has no attribute {name!r}' + else: + standardMsg = f'{type(obj).__name__!r} object has no attribute {name!r}' + self.fail(self._formatMessage(msg, standardMsg)) + + def assertNotHasAttr(self, obj, name, msg=None): + if hasattr(obj, name): + if isinstance(obj, types.ModuleType): + standardMsg = f'module {obj.__name__!r} has unexpected attribute {name!r}' + elif isinstance(obj, type): + standardMsg = f'type object {obj.__name__!r} has unexpected attribute {name!r}' + else: + standardMsg = f'{type(obj).__name__!r} object 
has unexpected attribute {name!r}' self.fail(self._formatMessage(msg, standardMsg)) def assertRaisesRegex(self, expected_exception, expected_regex, @@ -1391,6 +1449,80 @@ def assertNotRegex(self, text, unexpected_regex, msg=None): msg = self._formatMessage(msg, standardMsg) raise self.failureException(msg) + def _tail_type_check(self, s, tails, msg): + if not isinstance(tails, tuple): + tails = (tails,) + for tail in tails: + if isinstance(tail, str): + if not isinstance(s, str): + self.fail(self._formatMessage(msg, + f'Expected str, not {type(s).__name__}')) + elif isinstance(tail, (bytes, bytearray)): + if not isinstance(s, (bytes, bytearray)): + self.fail(self._formatMessage(msg, + f'Expected bytes, not {type(s).__name__}')) + + def assertStartsWith(self, s, prefix, msg=None): + try: + if s.startswith(prefix): + return + except (AttributeError, TypeError): + self._tail_type_check(s, prefix, msg) + raise + a = safe_repr(s, short=True) + b = safe_repr(prefix) + if isinstance(prefix, tuple): + standardMsg = f"{a} doesn't start with any of {b}" + else: + standardMsg = f"{a} doesn't start with {b}" + self.fail(self._formatMessage(msg, standardMsg)) + + def assertNotStartsWith(self, s, prefix, msg=None): + try: + if not s.startswith(prefix): + return + except (AttributeError, TypeError): + self._tail_type_check(s, prefix, msg) + raise + if isinstance(prefix, tuple): + for x in prefix: + if s.startswith(x): + prefix = x + break + a = safe_repr(s, short=True) + b = safe_repr(prefix) + self.fail(self._formatMessage(msg, f"{a} starts with {b}")) + + def assertEndsWith(self, s, suffix, msg=None): + try: + if s.endswith(suffix): + return + except (AttributeError, TypeError): + self._tail_type_check(s, suffix, msg) + raise + a = safe_repr(s, short=True) + b = safe_repr(suffix) + if isinstance(suffix, tuple): + standardMsg = f"{a} doesn't end with any of {b}" + else: + standardMsg = f"{a} doesn't end with {b}" + self.fail(self._formatMessage(msg, standardMsg)) + + def assertNotEndsWith(self, s, suffix, msg=None): + try: + if not s.endswith(suffix): + return + except (AttributeError, TypeError): + self._tail_type_check(s, suffix, msg) + raise + if isinstance(suffix, tuple): + for x in suffix: + if s.endswith(x): + suffix = x + break + a = safe_repr(s, short=True) + b = safe_repr(suffix) + self.fail(self._formatMessage(msg, f"{a} ends with {b}")) class FunctionTestCase(TestCase): diff --git a/Lib/unittest/result.py b/Lib/unittest/result.py index 97262735aa8311..b8ea396db6772e 100644 --- a/Lib/unittest/result.py +++ b/Lib/unittest/result.py @@ -191,7 +191,8 @@ def _exc_info_to_string(self, err, test): capture_locals=self.tb_locals, compact=True) from _colorize import can_colorize - msgLines = list(tb_e.format(colorize=can_colorize())) + colorize = hasattr(self, "stream") and can_colorize(file=self.stream) + msgLines = list(tb_e.format(colorize=colorize)) if self.buffer: output = sys.stdout.getvalue() diff --git a/Lib/unittest/runner.py b/Lib/unittest/runner.py index d60c295a1eddf7..eb0234a2617680 100644 --- a/Lib/unittest/runner.py +++ b/Lib/unittest/runner.py @@ -45,7 +45,7 @@ def __init__(self, stream, descriptions, verbosity, *, durations=None): self.showAll = verbosity > 1 self.dots = verbosity == 1 self.descriptions = descriptions - self._ansi = get_colors() + self._ansi = get_colors(file=stream) self._newline = True self.durations = durations @@ -286,7 +286,7 @@ def run(self, test): expected_fails, unexpected_successes, skipped = results infos = [] - ansi = get_colors() + ansi = 
get_colors(file=self.stream) bold_red = ansi.BOLD_RED green = ansi.GREEN red = ansi.RED diff --git a/Lib/urllib/parse.py b/Lib/urllib/parse.py index c412c729852272..9d51f4c6812b57 100644 --- a/Lib/urllib/parse.py +++ b/Lib/urllib/parse.py @@ -439,6 +439,23 @@ def _checknetloc(netloc): raise ValueError("netloc '" + netloc + "' contains invalid " + "characters under NFKC normalization") +def _check_bracketed_netloc(netloc): + # Note that this function must mirror the splitting + # done in NetlocResultMixins._hostinfo(). + hostname_and_port = netloc.rpartition('@')[2] + before_bracket, have_open_br, bracketed = hostname_and_port.partition('[') + if have_open_br: + # No data is allowed before a bracket. + if before_bracket: + raise ValueError("Invalid IPv6 URL") + hostname, _, port = bracketed.partition(']') + # No data is allowed after the bracket but before the port delimiter. + if port and not port.startswith(":"): + raise ValueError("Invalid IPv6 URL") + else: + hostname, _, port = hostname_and_port.partition(':') + _check_bracketed_host(hostname) + # Valid bracketed hosts are defined in # https://www.rfc-editor.org/rfc/rfc3986#page-49 and https://url.spec.whatwg.org/ def _check_bracketed_host(hostname): @@ -505,8 +522,7 @@ def _urlsplit(url, scheme=None, allow_fragments=True): (']' in netloc and '[' not in netloc)): raise ValueError("Invalid IPv6 URL") if '[' in netloc and ']' in netloc: - bracketed_host = netloc.partition('[')[2].partition(']')[0] - _check_bracketed_host(bracketed_host) + _check_bracketed_netloc(netloc) if allow_fragments and '#' in url: url, fragment = url.split('#', 1) if '?' in url: diff --git a/Lib/urllib/robotparser.py b/Lib/urllib/robotparser.py index c58565e3945146..409f2b2e48de6e 100644 --- a/Lib/urllib/robotparser.py +++ b/Lib/urllib/robotparser.py @@ -11,6 +11,7 @@ """ import collections +import urllib.error import urllib.parse import urllib.request @@ -65,6 +66,7 @@ def read(self): self.disallow_all = True elif err.code >= 400 and err.code < 500: self.allow_all = True + err.close() else: raw = f.read() self.parse(raw.decode("utf-8").splitlines()) diff --git a/Lib/uuid.py b/Lib/uuid.py index 9c6ad9643cf6d5..36809b85cb8ceb 100644 --- a/Lib/uuid.py +++ b/Lib/uuid.py @@ -42,6 +42,14 @@ # make a UUID from a 16-byte string >>> uuid.UUID(bytes=x.bytes) UUID('00010203-0405-0607-0809-0a0b0c0d0e0f') + + # get the Nil UUID + >>> uuid.NIL + UUID('00000000-0000-0000-0000-000000000000') + + # get the Max UUID + >>> uuid.MAX + UUID('ffffffff-ffff-ffff-ffff-ffffffffffff') """ import os @@ -85,6 +93,17 @@ class SafeUUID: unknown = None +_UINT_128_MAX = (1 << 128) - 1 +# 128-bit mask to clear the variant and version bits of a UUID integral value +_RFC_4122_CLEARFLAGS_MASK = ~((0xf000 << 64) | (0xc000 << 48)) +# RFC 4122 variant bits and version bits to activate on a UUID integral value. +_RFC_4122_VERSION_1_FLAGS = ((1 << 76) | (0x8000 << 48)) +_RFC_4122_VERSION_3_FLAGS = ((3 << 76) | (0x8000 << 48)) +_RFC_4122_VERSION_4_FLAGS = ((4 << 76) | (0x8000 << 48)) +_RFC_4122_VERSION_5_FLAGS = ((5 << 76) | (0x8000 << 48)) +_RFC_4122_VERSION_8_FLAGS = ((8 << 76) | (0x8000 << 48)) + + class UUID: """Instances of the UUID class represent UUIDs as specified in RFC 4122. UUID objects are immutable, hashable, and usable as dictionary keys. 
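The flag constants added above are what the rewritten constructors rely on: clear the four version bits and the two variant bits, then OR in the pattern for the desired version. A minimal sketch of that bit manipulation, mirroring what the new uuid4() in this patch does (the masks are recomputed locally here because the module-level names are private):

import os
import uuid

# Same values as the private constants introduced in this patch,
# recomputed here for illustration only.
CLEARFLAGS_MASK = ~((0xf000 << 64) | (0xc000 << 48))   # version + variant bits
VERSION_4_FLAGS = (4 << 76) | (0x8000 << 48)            # version 4, RFC 4122 variant

value = int.from_bytes(os.urandom(16), "big")  # 128 random bits
value &= CLEARFLAGS_MASK                       # clear version and variant bits
value |= VERSION_4_FLAGS                       # stamp version 4 and the variant

u = uuid.UUID(int=value)
assert u.version == 4
assert u.variant == uuid.RFC_4122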
@@ -174,57 +193,69 @@ def __init__(self, hex=None, bytes=None, bytes_le=None, fields=None, if [hex, bytes, bytes_le, fields, int].count(None) != 4: raise TypeError('one of the hex, bytes, bytes_le, fields, ' 'or int arguments must be given') - if hex is not None: + if int is not None: + pass + elif hex is not None: hex = hex.replace('urn:', '').replace('uuid:', '') hex = hex.strip('{}').replace('-', '') if len(hex) != 32: raise ValueError('badly formed hexadecimal UUID string') int = int_(hex, 16) - if bytes_le is not None: + elif bytes_le is not None: if len(bytes_le) != 16: raise ValueError('bytes_le is not a 16-char string') + assert isinstance(bytes_le, bytes_), repr(bytes_le) bytes = (bytes_le[4-1::-1] + bytes_le[6-1:4-1:-1] + bytes_le[8-1:6-1:-1] + bytes_le[8:]) - if bytes is not None: + int = int_.from_bytes(bytes) # big endian + elif bytes is not None: if len(bytes) != 16: raise ValueError('bytes is not a 16-char string') assert isinstance(bytes, bytes_), repr(bytes) int = int_.from_bytes(bytes) # big endian - if fields is not None: + elif fields is not None: if len(fields) != 6: raise ValueError('fields is not a 6-tuple') (time_low, time_mid, time_hi_version, clock_seq_hi_variant, clock_seq_low, node) = fields - if not 0 <= time_low < 1<<32: + if not 0 <= time_low < (1 << 32): raise ValueError('field 1 out of range (need a 32-bit value)') - if not 0 <= time_mid < 1<<16: + if not 0 <= time_mid < (1 << 16): raise ValueError('field 2 out of range (need a 16-bit value)') - if not 0 <= time_hi_version < 1<<16: + if not 0 <= time_hi_version < (1 << 16): raise ValueError('field 3 out of range (need a 16-bit value)') - if not 0 <= clock_seq_hi_variant < 1<<8: + if not 0 <= clock_seq_hi_variant < (1 << 8): raise ValueError('field 4 out of range (need an 8-bit value)') - if not 0 <= clock_seq_low < 1<<8: + if not 0 <= clock_seq_low < (1 << 8): raise ValueError('field 5 out of range (need an 8-bit value)') - if not 0 <= node < 1<<48: + if not 0 <= node < (1 << 48): raise ValueError('field 6 out of range (need a 48-bit value)') clock_seq = (clock_seq_hi_variant << 8) | clock_seq_low int = ((time_low << 96) | (time_mid << 80) | (time_hi_version << 64) | (clock_seq << 48) | node) - if int is not None: - if not 0 <= int < 1<<128: - raise ValueError('int is out of range (need a 128-bit value)') + if not 0 <= int <= _UINT_128_MAX: + raise ValueError('int is out of range (need a 128-bit value)') if version is not None: if not 1 <= version <= 8: raise ValueError('illegal version number') + # clear the variant and the version number bits + int &= _RFC_4122_CLEARFLAGS_MASK # Set the variant to RFC 4122/9562. - int &= ~(0xc000 << 48) - int |= 0x8000 << 48 + int |= 0x8000_0000_0000_0000 # (0x8000 << 48) # Set the version number. - int &= ~(0xf000 << 64) int |= version << 76 object.__setattr__(self, 'int', int) object.__setattr__(self, 'is_safe', is_safe) + @classmethod + def _from_int(cls, value): + """Create a UUID from an integer *value*. 
Internal use only.""" + assert 0 <= value <= _UINT_128_MAX, repr(value) + self = object.__new__(cls) + object.__setattr__(self, 'int', value) + object.__setattr__(self, 'is_safe', SafeUUID.unknown) + return self + def __getstate__(self): d = {'int': self.int} if self.is_safe != SafeUUID.unknown: @@ -700,24 +731,30 @@ def uuid3(namespace, name): """Generate a UUID from the MD5 hash of a namespace UUID and a name.""" if isinstance(name, str): name = bytes(name, "utf-8") - from hashlib import md5 - digest = md5( - namespace.bytes + name, - usedforsecurity=False - ).digest() - return UUID(bytes=digest[:16], version=3) + import hashlib + h = hashlib.md5(namespace.bytes + name, usedforsecurity=False) + int_uuid_3 = int.from_bytes(h.digest()) + int_uuid_3 &= _RFC_4122_CLEARFLAGS_MASK + int_uuid_3 |= _RFC_4122_VERSION_3_FLAGS + return UUID._from_int(int_uuid_3) def uuid4(): """Generate a random UUID.""" - return UUID(bytes=os.urandom(16), version=4) + int_uuid_4 = int.from_bytes(os.urandom(16)) + int_uuid_4 &= _RFC_4122_CLEARFLAGS_MASK + int_uuid_4 |= _RFC_4122_VERSION_4_FLAGS + return UUID._from_int(int_uuid_4) def uuid5(namespace, name): """Generate a UUID from the SHA-1 hash of a namespace UUID and a name.""" if isinstance(name, str): name = bytes(name, "utf-8") - from hashlib import sha1 - hash = sha1(namespace.bytes + name).digest() - return UUID(bytes=hash[:16], version=5) + import hashlib + h = hashlib.sha1(namespace.bytes + name, usedforsecurity=False) + int_uuid_5 = int.from_bytes(h.digest()[:16]) + int_uuid_5 &= _RFC_4122_CLEARFLAGS_MASK + int_uuid_5 |= _RFC_4122_VERSION_5_FLAGS + return UUID._from_int(int_uuid_5) def uuid8(a=None, b=None, c=None): """Generate a UUID from three custom blocks. @@ -740,7 +777,9 @@ def uuid8(a=None, b=None, c=None): int_uuid_8 = (a & 0xffff_ffff_ffff) << 80 int_uuid_8 |= (b & 0xfff) << 64 int_uuid_8 |= c & 0x3fff_ffff_ffff_ffff - return UUID(int=int_uuid_8, version=8) + # by construction, the variant and version bits are already cleared + int_uuid_8 |= _RFC_4122_VERSION_8_FLAGS + return UUID._from_int(int_uuid_8) def main(): """Run the uuid command line interface.""" @@ -799,5 +838,10 @@ def main(): NAMESPACE_OID = UUID('6ba7b812-9dad-11d1-80b4-00c04fd430c8') NAMESPACE_X500 = UUID('6ba7b814-9dad-11d1-80b4-00c04fd430c8') +# RFC 9562 Sections 5.9 and 5.10 define the special Nil and Max UUID formats. + +NIL = UUID('00000000-0000-0000-0000-000000000000') +MAX = UUID('ffffffff-ffff-ffff-ffff-ffffffffffff') + if __name__ == "__main__": main() diff --git a/Lib/warnings.py b/Lib/warnings.py index e83cde37ab2d1a..f20b01372dd7a4 100644 --- a/Lib/warnings.py +++ b/Lib/warnings.py @@ -185,24 +185,32 @@ def simplefilter(action, category=Warning, lineno=0, append=False): raise ValueError("lineno must be an int >= 0") _add_filter(action, None, category, None, lineno, append=append) +def _filters_mutated(): + # Even though this function is not part of the public API, it's used by + # a fair amount of user code. + with _lock: + _filters_mutated_lock_held() + def _add_filter(*item, append): - # Remove possible duplicate filters, so new one will be placed - # in correct place. If append=True and duplicate exists, do nothing. - if not append: - try: - filters.remove(item) - except ValueError: - pass - filters.insert(0, item) - else: - if item not in filters: - filters.append(item) - _filters_mutated() + with _lock: + if not append: + # Remove possible duplicate filters, so new one will be placed + # in correct place. If append=True and duplicate exists, do nothing. 
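+            # Note: the remove/insert sequence below now executes while holding
+            # the module-level _lock, so concurrent warn() calls see a consistent
+            # filters list rather than a half-updated one.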
+ try: + filters.remove(item) + except ValueError: + pass + filters.insert(0, item) + else: + if item not in filters: + filters.append(item) + _filters_mutated_lock_held() def resetwarnings(): """Clear the list of warning filters, so that no filters are active.""" - filters[:] = [] - _filters_mutated() + with _lock: + filters[:] = [] + _filters_mutated_lock_held() class _OptionError(Exception): """Exception used by option processing helpers.""" @@ -353,11 +361,6 @@ def warn_explicit(message, category, filename, lineno, module = filename or "" if module[-3:].lower() == ".py": module = module[:-3] # XXX What about leading pathname? - if registry is None: - registry = {} - if registry.get('version', 0) != _filters_version: - registry.clear() - registry['version'] = _filters_version if isinstance(message, Warning): text = str(message) category = message.__class__ @@ -365,52 +368,59 @@ def warn_explicit(message, category, filename, lineno, text = message message = category(message) key = (text, category, lineno) - # Quick test for common case - if registry.get(key): - return - # Search the filters - for item in filters: - action, msg, cat, mod, ln = item - if ((msg is None or msg.match(text)) and - issubclass(category, cat) and - (mod is None or mod.match(module)) and - (ln == 0 or lineno == ln)): - break - else: - action = defaultaction - # Early exit actions - if action == "ignore": - return + with _lock: + if registry is None: + registry = {} + if registry.get('version', 0) != _filters_version: + registry.clear() + registry['version'] = _filters_version + # Quick test for common case + if registry.get(key): + return + # Search the filters + for item in filters: + action, msg, cat, mod, ln = item + if ((msg is None or msg.match(text)) and + issubclass(category, cat) and + (mod is None or mod.match(module)) and + (ln == 0 or lineno == ln)): + break + else: + action = defaultaction + # Early exit actions + if action == "ignore": + return + + if action == "error": + raise message + # Other actions + if action == "once": + registry[key] = 1 + oncekey = (text, category) + if onceregistry.get(oncekey): + return + onceregistry[oncekey] = 1 + elif action in {"always", "all"}: + pass + elif action == "module": + registry[key] = 1 + altkey = (text, category, 0) + if registry.get(altkey): + return + registry[altkey] = 1 + elif action == "default": + registry[key] = 1 + else: + # Unrecognized actions are errors + raise RuntimeError( + "Unrecognized action (%r) in warnings.filters:\n %s" % + (action, item)) # Prime the linecache for formatting, in case the # "file" is actually in a zipfile or something. 
import linecache linecache.getlines(filename, module_globals) - if action == "error": - raise message - # Other actions - if action == "once": - registry[key] = 1 - oncekey = (text, category) - if onceregistry.get(oncekey): - return - onceregistry[oncekey] = 1 - elif action in {"always", "all"}: - pass - elif action == "module": - registry[key] = 1 - altkey = (text, category, 0) - if registry.get(altkey): - return - registry[altkey] = 1 - elif action == "default": - registry[key] = 1 - else: - # Unrecognized actions are errors - raise RuntimeError( - "Unrecognized action (%r) in warnings.filters:\n %s" % - (action, item)) # Print message and context msg = WarningMessage(message, category, filename, lineno, source) _showwarnmsg(msg) @@ -488,30 +498,32 @@ def __enter__(self): if self._entered: raise RuntimeError("Cannot enter %r twice" % self) self._entered = True - self._filters = self._module.filters - self._module.filters = self._filters[:] - self._module._filters_mutated() - self._showwarning = self._module.showwarning - self._showwarnmsg_impl = self._module._showwarnmsg_impl + with _lock: + self._filters = self._module.filters + self._module.filters = self._filters[:] + self._module._filters_mutated_lock_held() + self._showwarning = self._module.showwarning + self._showwarnmsg_impl = self._module._showwarnmsg_impl + if self._record: + log = [] + self._module._showwarnmsg_impl = log.append + # Reset showwarning() to the default implementation to make sure + # that _showwarnmsg() calls _showwarnmsg_impl() + self._module.showwarning = self._module._showwarning_orig + else: + log = None if self._filter is not None: simplefilter(*self._filter) - if self._record: - log = [] - self._module._showwarnmsg_impl = log.append - # Reset showwarning() to the default implementation to make sure - # that _showwarnmsg() calls _showwarnmsg_impl() - self._module.showwarning = self._module._showwarning_orig - return log - else: - return None + return log def __exit__(self, *exc_info): if not self._entered: raise RuntimeError("Cannot exit %r without entering first" % self) - self._module.filters = self._filters - self._module._filters_mutated() - self._module.showwarning = self._showwarning - self._module._showwarnmsg_impl = self._showwarnmsg_impl + with _lock: + self._module.filters = self._filters + self._module._filters_mutated_lock_held() + self._module.showwarning = self._showwarning + self._module._showwarnmsg_impl = self._showwarnmsg_impl class deprecated: @@ -701,18 +713,36 @@ def extract(): # If either if the compiled regexs are None, match anything. 
try: from _warnings import (filters, _defaultaction, _onceregistry, - warn, warn_explicit, _filters_mutated) + warn, warn_explicit, + _filters_mutated_lock_held, + _acquire_lock, _release_lock, + ) defaultaction = _defaultaction onceregistry = _onceregistry _warnings_defaults = True + + class _Lock: + def __enter__(self): + _acquire_lock() + return self + + def __exit__(self, *args): + _release_lock() + + _lock = _Lock() + except ImportError: filters = [] defaultaction = "default" onceregistry = {} + import _thread + + _lock = _thread.RLock() + _filters_version = 1 - def _filters_mutated(): + def _filters_mutated_lock_held(): global _filters_version _filters_version += 1 diff --git a/Lib/xml/dom/xmlbuilder.py b/Lib/xml/dom/xmlbuilder.py index 8a200263497b89..a8852625a2f9a2 100644 --- a/Lib/xml/dom/xmlbuilder.py +++ b/Lib/xml/dom/xmlbuilder.py @@ -189,7 +189,7 @@ def parse(self, input): options.filter = self.filter options.errorHandler = self.errorHandler fp = input.byteStream - if fp is None and options.systemId: + if fp is None and input.systemId: import urllib.request fp = urllib.request.urlopen(input.systemId) return self._parse_bytestream(fp, options) @@ -247,10 +247,12 @@ def _create_opener(self): def _guess_media_encoding(self, source): info = source.byteStream.info() - if "Content-Type" in info: - for param in info.getplist(): - if param.startswith("charset="): - return param.split("=", 1)[1].lower() + # import email.message + # assert isinstance(info, email.message.Message) + charset = info.get_param('charset') + if charset is not None: + return charset.lower() + return None class DOMInputSource(object): diff --git a/Lib/zipfile/__init__.py b/Lib/zipfile/__init__.py index 052ef47b8f6598..b8b496ad9471f4 100644 --- a/Lib/zipfile/__init__.py +++ b/Lib/zipfile/__init__.py @@ -13,7 +13,6 @@ import sys import threading import time -from typing import Self try: import zlib # We may need its compression method @@ -606,7 +605,7 @@ def from_file(cls, filename, arcname=None, *, strict_timestamps=True): return zinfo - def _for_archive(self, archive: ZipFile) -> Self: + def _for_archive(self, archive): """Resolve suitable defaults from the archive. Resolve the date_time, compression attributes, and external attributes @@ -614,7 +613,11 @@ def _for_archive(self, archive: ZipFile) -> Self: Return self. 
""" - self.date_time = time.localtime(time.time())[:6] + # gh-91279: Set the SOURCE_DATE_EPOCH to a specific timestamp + epoch = os.environ.get('SOURCE_DATE_EPOCH') + get_time = int(epoch) if epoch else time.time() + self.date_time = time.localtime(get_time)[:6] + self.compress_type = archive.compression self.compress_level = archive.compresslevel if self.filename.endswith('/'): # pragma: no cover @@ -1184,13 +1187,15 @@ def seek(self, offset, whence=os.SEEK_SET): self._offset = buff_offset read_offset = 0 # Fast seek uncompressed unencrypted file - elif self._compress_type == ZIP_STORED and self._decrypter is None and read_offset > 0: + elif self._compress_type == ZIP_STORED and self._decrypter is None and read_offset != 0: # disable CRC checking after first seeking - it would be invalid self._expected_crc = None # seek actual file taking already buffered data into account read_offset -= len(self._readbuffer) - self._offset self._fileobj.seek(read_offset, os.SEEK_CUR) self._left -= read_offset + self._compress_left -= read_offset + self._eof = self._left <= 0 read_offset = 0 # flush read buffer self._readbuffer = b'' diff --git a/Misc/ACKS b/Misc/ACKS index d7585c16c8169c..47c8d2b40aafb7 100644 --- a/Misc/ACKS +++ b/Misc/ACKS @@ -189,6 +189,7 @@ Stéphane Blondon Eric Blossom Sergey Bobrov Finn Bock +VojtÄ›ch BoÄek Paul Boddie Matthew Boedicker Robin Boerdijk @@ -1038,6 +1039,7 @@ Erno Kuusela Kabir Kwatra Ross Lagerwall Cameron Laird +Filipe Laíns Loïc Lajeanne Alexander Lakeev David Lam @@ -1474,6 +1476,7 @@ Michael Pomraning Martin Pool Iustin Pop Claudiu Popa +Nick Pope John Popplewell Matheus Vieira Portela Davin Potts @@ -1921,6 +1924,7 @@ Bill Tutt Fraser Tweedale Doobee R. Tzeck Eren Türkay +Stan Ulbrych Lionel Ulmer Adnan Umer Utkarsh Upadhyay @@ -1968,6 +1972,7 @@ Johannes Vogel Michael Vogt Radu Voicilas Alex Volkov +Illia Volochii Ruben Vorderman Guido Vranken Martijn Vries diff --git a/Misc/NEWS.d/3.10.0b1.rst b/Misc/NEWS.d/3.10.0b1.rst index 25c6b827146e82..406a5d7853edc0 100644 --- a/Misc/NEWS.d/3.10.0b1.rst +++ b/Misc/NEWS.d/3.10.0b1.rst @@ -941,7 +941,7 @@ result from ``entry_points()`` as deprecated. .. -.. gh: 47383 +.. gh-issue: 47383 .. date: 2021-04-08-19-32-26 .. nonce: YI1hdL .. section: Library diff --git a/Misc/NEWS.d/3.11.0b1.rst b/Misc/NEWS.d/3.11.0b1.rst index 85cb0f1b5cffbd..87442dbbbd17f5 100644 --- a/Misc/NEWS.d/3.11.0b1.rst +++ b/Misc/NEWS.d/3.11.0b1.rst @@ -570,7 +570,7 @@ planned). Patch by Alex Waygood. .. -.. gh: 78157 +.. gh-issue: 78157 .. date: 2022-05-05-20-40-45 .. nonce: IA_9na .. section: Library @@ -1289,7 +1289,7 @@ Deprecate the chunk module. .. -.. gh: 91498 +.. gh-issue: 91498 .. date: 2022-04-10-08-39-44 .. nonce: 8oII92 .. section: Library diff --git a/Misc/NEWS.d/3.14.0a4.rst b/Misc/NEWS.d/3.14.0a4.rst new file mode 100644 index 00000000000000..1e08b36020386c --- /dev/null +++ b/Misc/NEWS.d/3.14.0a4.rst @@ -0,0 +1,830 @@ +.. date: 2024-12-22-08-54-30 +.. gh-issue: 127592 +.. nonce: iyuFCC +.. release date: 2025-01-14 +.. section: macOS + +Usage of the unified Apple System Log APIs was disabled when the minimum +macOS version is earlier than 10.12. + +.. + +.. date: 2025-01-03-23-51-07 +.. gh-issue: 128152 +.. nonce: IhzElS +.. section: Tools/Demos + +Fix a bug where Argument Clinic's C pre-processor parser tried to parse +pre-processor directives inside C comments. Patch by Erlend Aasland. + +.. + +.. date: 2025-01-13-01-29-08 +.. gh-issue: 128690 +.. nonce: cPFVDb +.. 
section: Tests + +Temporarily do not use test_embed in PGO profile builds until the problem +with test_init_pyvenv_cfg failing in some configurations is resolved. + +.. + +.. date: 2025-01-11-13-40-12 +.. gh-issue: 128731 +.. nonce: qpKlai +.. section: Library + +Fix :exc:`ResourceWarning` in +:meth:`urllib.robotparser.RobotFileParser.read`. + +.. + +.. date: 2025-01-10-15-06-45 +.. gh-issue: 71339 +.. nonce: EKnpzw +.. section: Library + +Add new assertion methods for :mod:`unittest`: +:meth:`~unittest.TestCase.assertHasAttr`, +:meth:`~unittest.TestCase.assertNotHasAttr`, +:meth:`~unittest.TestCase.assertIsSubclass`, +:meth:`~unittest.TestCase.assertNotIsSubclass` +:meth:`~unittest.TestCase.assertStartsWith`, +:meth:`~unittest.TestCase.assertNotStartsWith`, +:meth:`~unittest.TestCase.assertEndsWith` and +:meth:`~unittest.TestCase.assertNotEndsWith`. + +.. + +.. date: 2025-01-10-13-34-33 +.. gh-issue: 118761 +.. nonce: qRB8nS +.. section: Library + +Improve import time of :mod:`pickle` by 25% by removing an unnecessary +regular expression. As such, :mod:`re` is no more implicitly available as +``pickle.re``. Patch by Bénédikt Tran. + +.. + +.. date: 2025-01-09-12-06-52 +.. gh-issue: 128661 +.. nonce: ixx_0z +.. section: Library + +Fixes :func:`typing.evaluate_forward_ref` not showing deprecation when +``type_params`` arg is not passed. + +.. + +.. date: 2025-01-08-03-09-29 +.. gh-issue: 128562 +.. nonce: Mlv-yO +.. section: Library + +Fix possible conflicts in generated :mod:`tkinter` widget names if the +widget class name ends with a digit. + +.. + +.. date: 2025-01-06-21-35-00 +.. gh-issue: 128559 +.. nonce: 6fxcDM +.. section: Library + +Improved import time of :mod:`asyncio`. + +.. + +.. date: 2025-01-06-18-41-08 +.. gh-issue: 128552 +.. nonce: fV-f8j +.. section: Library + +Fix cyclic garbage introduced by :meth:`asyncio.loop.create_task` and +:meth:`asyncio.TaskGroup.create_task` holding a reference to the created +task if it is eager. + +.. + +.. date: 2025-01-05-11-46-14 +.. gh-issue: 128340 +.. nonce: gKI0uU +.. section: Library + +Add internal thread safe handle to be used in +:meth:`asyncio.loop.call_soon_threadsafe` for thread safe cancellation. + +.. + +.. date: 2025-01-04-11-32-46 +.. gh-issue: 128182 +.. nonce: SJ2Zsa +.. section: Library + +Fix crash when using :mod:`ctypes` pointers concurrently on the :term:`free +threaded ` build. + +.. + +.. date: 2025-01-02-15-20-17 +.. gh-issue: 128400 +.. nonce: UMiG4f +.. section: Library + +Only show the current thread in :mod:`faulthandler` on the :term:`free +threaded ` build to prevent races. + +.. + +.. date: 2025-01-02-13-05-16 +.. gh-issue: 128400 +.. nonce: 5N43fF +.. section: Library + +Fix crash when using :func:`faulthandler.dump_traceback` while other threads +are active on the :term:`free threaded ` build. + +.. + +.. date: 2025-01-01-19-24-43 +.. gh-issue: 128388 +.. nonce: 8UdMz_ +.. section: Library + +Fix ``PyREPL`` on Windows to support more keybindings, like the +:kbd:`Control-â†` and :kbd:`Control-→` word-skipping keybindings and those +with meta (i.e. :kbd:`Alt`), e.g. :kbd:`Alt-d` to ``kill-word`` or +:kbd:`Alt-Backspace` ``backward-kill-word``. + +.. + +.. date: 2024-12-30-20-48-28 +.. gh-issue: 88834 +.. nonce: RIvgwc +.. section: Library + +Unify the instance check for :class:`typing.Union` and +:class:`types.UnionType`: :class:`!Union` now uses the instance checks +against its parameters instead of the subclass checks. + +.. + +.. date: 2024-12-29-13-49-46 +.. gh-issue: 128302 +.. nonce: psRpPN +.. 
section: Library + +Fix :meth:`!xml.dom.xmlbuilder.DOMEntityResolver.resolveEntity`, which was +broken by the Python 3.0 transition. + +.. + +.. date: 2024-12-29-00-33-34 +.. gh-issue: 128317 +.. nonce: WgFina +.. section: Library + +Highlight today in colour in :mod:`calendar`'s CLI output. Patch by Hugo van +Kemenade. + +.. + +.. date: 2024-12-27-16-28-57 +.. gh-issue: 128302 +.. nonce: 2GMvyl +.. section: Library + +Allow :meth:`!xml.dom.xmlbuilder.DOMParser.parse` to correctly handle +:class:`!xml.dom.xmlbuilder.DOMInputSource` instances that only have a +:attr:`!systemId` attribute set. + +.. + +.. date: 2024-12-21-11-12-50 +.. gh-issue: 128151 +.. nonce: aq7vpG +.. section: Library + +Improve generation of :class:`~uuid.UUID` objects version 3, 4, 5, and 8 via +their dedicated functions by 30%. Patch by Bénédikt Tran. + +.. + +.. date: 2024-12-20-10-57-10 +.. gh-issue: 128118 +.. nonce: mYak8i +.. section: Library + +Improve performance of :func:`copy.copy` by 30% via a fast path for atomic +types and container types. + +.. + +.. date: 2024-12-19-20-46-01 +.. gh-issue: 127946 +.. nonce: 4lM3Op +.. section: Library + +Fix crash when modifying :class:`ctypes._CFuncPtr` objects concurrently on +the :term:`free threaded ` build. + +.. + +.. date: 2024-12-18-10-18-55 +.. gh-issue: 128062 +.. nonce: E9oU7- +.. section: Library + +Revert the font of :mod:`turtledemo`'s menu bar to its default value and +display the shortcut keys in the correct position. + +.. + +.. date: 2024-12-18-00-07-50 +.. gh-issue: 128014 +.. nonce: F3aUbz +.. section: Library + +Fix resetting the default window icon by passing ``default=''`` to the +:mod:`tkinter` method :meth:`!wm_iconbitmap`. + +.. + +.. date: 2024-12-17-15-23-40 +.. gh-issue: 41872 +.. nonce: 31LjKY +.. section: Library + +Fix quick extraction of module docstrings from a file in :mod:`pydoc`. It +now supports docstrings with single quotes, escape sequences, raw string +literals, and other Python syntax. + +.. + +.. date: 2024-12-17-13-21-52 +.. gh-issue: 127060 +.. nonce: mv2bX6 +.. section: Library + +Set TERM environment variable to "dumb" to disable traceback colors in IDLE, +since IDLE doesn't understand ANSI escape sequences. Patch by Victor +Stinner. + +.. + +.. date: 2024-12-17-12-41-07 +.. gh-issue: 126742 +.. nonce: l07qvT +.. section: Library + +Fix support of localized error messages reported by :manpage:`dlerror(3)` +and :manpage:`gdbm_strerror ` in :mod:`ctypes` and :mod:`dbm.gnu` +functions respectively. Patch by Bénédikt Tran. + +.. + +.. date: 2024-12-13-14-21-04 +.. gh-issue: 122548 +.. nonce: hq3Vud +.. section: Library + +Adds two new local events to sys.monitoring, ``BRANCH_LEFT`` and +``BRANCH_RIGHT``. This allows the two arms of the branch to be disabled +independently, which should hugely improve performance of branch-level +coverage tools. The old branch event, ``BRANCH`` is now deprecated. + +.. + +.. date: 2024-12-12-07-27-51 +.. gh-issue: 127847 +.. nonce: ksfNKM +.. section: Library + +Fix the position when doing interleaved seeks and reads in uncompressed, +unencrypted zip files returned by :meth:`zipfile.ZipFile.open`. + +.. + +.. date: 2024-12-06-21-03-11 +.. gh-issue: 127688 +.. nonce: NJqtc- +.. section: Library + +Add the :data:`~os.SCHED_DEADLINE` and :data:`~os.SCHED_NORMAL` constants to +the :mod:`os` module. + +.. + +.. date: 2024-12-04-10-39-29 +.. gh-issue: 83662 +.. nonce: CG1s3m +.. 
section: Library + +Add missing ``__class_getitem__`` method to the Python implementation of +:func:`functools.partial`, to make it compatible with the C version. This is +mainly relevant for alternative Python implementations like PyPy and +GraalPy, because CPython will usually use the C-implementation of that +function. + +.. + +.. date: 2024-12-03-20-28-08 +.. gh-issue: 127586 +.. nonce: zgotYF +.. section: Library + +:class:`multiprocessing.pool.Pool` now properly restores blocked signal +handlers of the parent thread when creating processes via either *spawn* or +*forkserver*. + +.. + +.. date: 2024-12-03-14-45-16 +.. gh-issue: 98188 +.. nonce: GX9i2b +.. section: Library + +Fix an issue in :meth:`email.message.Message.get_payload` where data cannot +be decoded if the Content Transfer Encoding mechanism contains trailing +whitespaces or additional junk text. Patch by Hui Liu. + +.. + +.. date: 2024-12-02-19-13-19 +.. gh-issue: 127529 +.. nonce: Pj1Xtf +.. section: Library + +Correct behavior of +:func:`!asyncio.selector_events.BaseSelectorEventLoop._accept_connection` in +handling :exc:`ConnectionAbortedError` in a loop. This improves performance +on OpenBSD. + +.. + +.. date: 2024-11-28-14-24-12 +.. gh-issue: 127360 +.. nonce: HVKt-c +.. section: Library + +When a descriptive error message cannot be provided for an +:exc:`ssl.SSLError`, the "unknown error" message now shows the internal +error code (as retrieved by ``ERR_get_error`` and similar OpenSSL +functions). + +.. + +.. date: 2024-11-24-14-53-35 +.. gh-issue: 127196 +.. nonce: 8CBkUa +.. section: Library + +Fix crash when dict with keys in invalid encoding were passed to several +functions in ``_interpreters`` module. + +.. + +.. date: 2024-11-19-10-46-57 +.. gh-issue: 124130 +.. nonce: OZ_vR5 +.. section: Library + +Fix a bug in matching regular expression ``\B`` in empty input string. Now +it is always the opposite of ``\b``. To get an old behavior, use +``(?!\A\Z)\B``. To get a new behavior in old Python versions, use +``(?!\b)``. + +.. + +.. date: 2024-11-11-07-56-03 +.. gh-issue: 126639 +.. nonce: AmVSt- +.. section: Library + +:class:`tempfile.NamedTemporaryFile` will now issue a :exc:`ResourceWarning` +when it is finalized by the garbage collector without being explicitly +closed. + +.. + +.. date: 2024-11-09-15-59-51 +.. gh-issue: 126624 +.. nonce: bN53Va +.. section: Library + +Expose error code :data:`~xml.parsers.expat.errors.XML_ERROR_NOT_STARTED` of +Expat >=2.6.4 in :mod:`xml.parsers.expat.errors`. + +.. + +.. date: 2024-10-31-14-31-36 +.. gh-issue: 126225 +.. nonce: vTxGXm +.. section: Library + +:mod:`getopt` and :mod:`optparse` are no longer marked as deprecated. There +are legitimate reasons to use one of these modules in preference to +:mod:`argparse`, and none of these modules are at risk of being removed from +the standard library. Of the three, ``argparse`` remains the recommended +default choice, *unless* one of the concerns noted at the top of the +``optparse`` module documentation applies. + +.. + +.. date: 2024-10-04-09-56-45 +.. gh-issue: 124761 +.. nonce: N4pSD6 +.. section: Library + +Add :data:`~socket.SO_REUSEPORT_LB` constant to :mod:`socket` for FreeBSD. + +.. + +.. date: 2024-09-04-14-13-14 +.. gh-issue: 121720 +.. nonce: z9hhXQ +.. section: Library + +:class:`enum.EnumDict` can now be used without resorting to private API. + +.. + +.. date: 2024-08-28-16-10-37 +.. gh-issue: 123424 +.. nonce: u96_i6 +.. 
section: Library + +Add :meth:`zipfile.ZipInfo._for_archive` setting default properties on +:class:`~zipfile.ZipInfo` objects. Patch by Bénédikt Tran and Jason R. +Coombs. + +.. + +.. date: 2024-07-13-13-25-31 +.. gh-issue: 121676 +.. nonce: KDLS11 +.. section: Library + +Deprecate calling the Python implementation of :meth:`functools.reduce` with +a ``function`` or ``sequence`` as a :term:`keyword argument`. This will be +forbidden in Python 3.16 in order to match the C implementation. + +.. + +.. date: 2023-11-12-21-53-40 +.. gh-issue: 112015 +.. nonce: 2WPRxE +.. section: Library + +:func:`ctypes.memoryview_at` now exists to create a :class:`memoryview` +object that refers to the supplied pointer and length. This works like +:func:`ctypes.string_at` except it avoids a buffer copy, and is typically +useful when implementing pure Python callback functions that are passed +dynamically-sized buffers. + +.. + +.. date: 2022-07-28-12-32-59 +.. gh-issue: 95371 +.. nonce: F24IFC +.. section: Library + +Added support for other image formats (PNG, PGM, and PPM) to the turtle +module. Patch by Shin-myoung-serp. + +.. + +.. date: 2025-01-13-12-48-30 +.. gh-issue: 128078 +.. nonce: qOsl9B +.. section: Core and Builtins + +Fix a :exc:`SystemError` when using :func:`anext` with a default tuple +value. Patch by Bénédikt Tran. + +.. + +.. date: 2025-01-11-12-39-17 +.. gh-issue: 128717 +.. nonce: i65d06 +.. section: Core and Builtins + +Fix a crash when setting the recursion limit while other threads are active +on the :term:`free threaded ` build. + +.. + +.. date: 2025-01-09-11-46-57 +.. gh-issue: 124483 +.. nonce: KRtBeQ +.. section: Core and Builtins + +Treat ``Py_DECREF`` and variants as escaping when generating opcode and uop +metadata. This prevents the possibility of a ``__del__`` method causing the +JIT to behave incorrectly. + +.. + +.. date: 2025-01-07-19-48-56 +.. gh-issue: 126703 +.. nonce: 0ISs-7 +.. section: Core and Builtins + +Improve performance of class methods by using a freelist. + +.. + +.. date: 2024-12-24-01-40-12 +.. gh-issue: 128137 +.. nonce: gsTwr_ +.. section: Core and Builtins + +Update :c:type:`PyASCIIObject` layout to handle interned field with the +atomic operation. Patch by Donghee Na. + +.. + +.. date: 2024-12-23-11-14-07 +.. gh-issue: 128192 +.. nonce: 02mEhD +.. section: Core and Builtins + +Upgrade HTTP digest authentication algorithm for :mod:`urllib.request` by +supporting SHA-256 digest authentication as specified in :rfc:`7616`. + +.. + +.. date: 2024-12-22-15-47-44 +.. gh-issue: 126868 +.. nonce: RpjKez +.. section: Core and Builtins + +Increase usage of freelist for :class:`int` allocation. + +.. + +.. date: 2024-12-20-23-07-33 +.. gh-issue: 114203 +.. nonce: 84NgoW +.. section: Core and Builtins + +Optimize ``Py_BEGIN_CRITICAL_SECTION`` for simple recursive calls. + +.. + +.. date: 2024-12-20-12-25-16 +.. gh-issue: 127705 +.. nonce: WmCz1z +.. section: Core and Builtins + +Adds stackref debugging when ``Py_STACKREF_DEBUG`` is set. Finds all +double-closes and leaks, logging the origin and last borrow. + +Inspired by HPy's debug mode. +https://docs.hpyproject.org/en/latest/debug-mode.html + +.. + +.. date: 2024-12-18-14-22-48 +.. gh-issue: 128079 +.. nonce: SUD5le +.. section: Core and Builtins + +Fix a bug where :keyword:`except* ` does not properly check the +return value of an :exc:`ExceptionGroup`'s :meth:`~BaseExceptionGroup.split` +function, leading to a crash in some cases. 
Now when +:meth:`~BaseExceptionGroup.split` returns an invalid object, +:keyword:`except* ` raises a :exc:`TypeError` with the original +raised :exc:`ExceptionGroup` object chained to it. + +.. + +.. date: 2024-12-17-22-28-15 +.. gh-issue: 128030 +.. nonce: H1ptOD +.. section: Core and Builtins + +Avoid error from calling ``PyModule_GetFilenameObject`` on a non-module +object when importing a non-existent symbol from a non-module object. + +.. + +.. date: 2024-12-17-18-20-37 +.. gh-issue: 128035 +.. nonce: JwqHdB +.. section: Core and Builtins + +Indicate through :data:`ssl.HAS_PHA` whether the :mod:`ssl` module supports +TLSv1.3 post-handshake client authentication (PHA). Patch by Will +Childs-Klein. + +.. + +.. date: 2024-12-17-13-45-33 +.. gh-issue: 127274 +.. nonce: deNxNC +.. section: Core and Builtins + +Add a new flag, ``CO_METHOD``, to :attr:`~codeobject.co_flags` that +indicates whether the code object belongs to a function defined in class +scope. + +.. + +.. date: 2024-12-15-21-11-26 +.. gh-issue: 66409 +.. nonce: wv109z +.. section: Core and Builtins + +During the :ref:`path initialization `, we now check if +``base_exec_prefix`` is the same as ``base_prefix`` before falling back to +searching the Python interpreter directory. + +.. + +.. date: 2024-12-15-19-51-54 +.. gh-issue: 127970 +.. nonce: vdUp-y +.. section: Core and Builtins + +We now use the location of the ``libpython`` runtime library used in the +current process to determine :data:`sys.base_prefix` on all platforms +implementing the `dladdr +`_ +function defined by the UNIX standard — this includes Linux, Android, macOS, +iOS, FreeBSD, etc. This was already the case on Windows and macOS Framework +builds. + +.. + +.. date: 2024-12-13-15-21-45 +.. gh-issue: 127773 +.. nonce: E-DZR4 +.. section: Core and Builtins + +Do not use the type attribute cache for types with incompatible :term:`MRO`. + +.. + +.. date: 2024-12-13-14-17-24 +.. gh-issue: 127903 +.. nonce: vemHSl +.. section: Core and Builtins + +``Objects/unicodeobject.c``: fix a crash on DEBUG builds in +``_copy_characters`` when there is nothing to copy. + +.. + +.. date: 2024-12-11-14-32-22 +.. gh-issue: 127809 +.. nonce: 0W8khe +.. section: Core and Builtins + +Fix an issue where the experimental JIT may infer an incorrect result type +for exponentiation (``**`` and ``**=``), leading to bugs or crashes. + +.. + +.. date: 2024-12-02-18-15-37 +.. gh-issue: 126862 +.. nonce: fdIK7T +.. section: Core and Builtins + +Fix a possible overflow when a class inherits from an absurd number of +super-classes. Reported by Valery Fedorenko. Patch by Bénédikt Tran. + +.. + +.. date: 2025-01-12-12-19-51 +.. gh-issue: 128400 +.. nonce: OwoIDw +.. section: C API + +:c:func:`Py_FatalError` no longer shows all threads on the :term:`free +threaded ` build to prevent crashes. + +.. + +.. date: 2025-01-08-13-13-18 +.. gh-issue: 128629 +.. nonce: gSmzyl +.. section: C API + +Add macros :c:func:`Py_PACK_VERSION` and :c:func:`Py_PACK_FULL_VERSION` for +bit-packing Python version numbers. + +.. + +.. date: 2024-12-16-21-59-06 +.. gh-issue: 128008 +.. nonce: fa9Jt0 +.. section: C API + +Add :c:func:`PyWeakref_IsDead` function, which tests if a weak reference is +dead. + +.. + +.. date: 2024-12-11-13-01-26 +.. gh-issue: 127350 +.. nonce: uEBZZ4 +.. section: C API + +Add :c:func:`Py_fopen` function to open a file. Similar to the +:c:func:`!fopen` function, but the *path* parameter is a Python object and +an exception is set on error.
Add also :c:func:`Py_fclose` function to close +a file, function needed for Windows support. Patch by Victor Stinner. + +.. + +.. date: 2025-01-09-19-44-00 +.. gh-issue: 128627 +.. nonce: mHzsEd +.. section: Build + +For Emscripten builds the function pointer cast call trampoline now uses the +wasm-gc ref.test instruction if it's available instead of Wasm JS type +reflection. + +.. + +.. date: 2025-01-04-22-39-10 +.. gh-issue: 128472 +.. nonce: Wt5E6M +.. section: Build + +Skip BOLT optimization of functions using computed gotos, fixing errors on +build with LLVM 19. + +.. + +.. date: 2025-01-02-12-50-46 +.. gh-issue: 115765 +.. nonce: jko7Fg +.. section: Build + +GNU Autoconf 2.72 is now required to generate :file:`!configure`. Patch by +Erlend Aasland. + +.. + +.. date: 2025-01-02-11-02-45 +.. gh-issue: 123925 +.. nonce: TLlyUi +.. section: Build + +Fix building the :mod:`curses` module on platforms with libncurses but +without libncursesw. + +.. + +.. date: 2024-12-31-17-09-37 +.. gh-issue: 90905 +.. nonce: PjLNai +.. section: Build + +Add support for cross-compiling to x86_64 on aarch64/arm64 macOS. + +.. + +.. date: 2024-12-28-21-05-19 +.. gh-issue: 128321 +.. nonce: 0UvbXw +.. section: Build + +Set ``LIBS`` instead of ``LDFLAGS`` when checking if :mod:`sqlite3` library +functions are available. This fixes the ordering of linked libraries during +checks, which was incorrect when using a statically linked ``libsqlite3``. + +.. + +.. date: 2024-12-21-09-56-37 +.. gh-issue: 100384 +.. nonce: Ib-XrN +.. section: Build + +Error on ``unguarded-availability`` in macOS builds, preventing invalid use +of symbols that are not available in older versions of the OS. + +.. + +.. date: 2024-12-20-09-03-22 +.. gh-issue: 128104 +.. nonce: m_SoVx +.. section: Build + +Remove ``Py_STRFTIME_C99_SUPPORT`` conditions in favor of requiring C99 +:manpage:`strftime(3)` specifier support at build time. When +cross-compiling, there is no build time check and support is assumed. + +.. + +.. date: 2024-12-16-16-16-35 +.. gh-issue: 127951 +.. nonce: lpE13- +.. section: Build + +Add option ``--pystats`` to the Windows build to enable performance +statistics collection. diff --git a/Misc/NEWS.d/3.5.3rc1.rst b/Misc/NEWS.d/3.5.3rc1.rst index 2424604249a65c..cfc729dd82556f 100644 --- a/Misc/NEWS.d/3.5.3rc1.rst +++ b/Misc/NEWS.d/3.5.3rc1.rst @@ -1146,7 +1146,7 @@ after a commit. .. section: Library A new version of typing.py from https://github.com/python/typing: -Collection (only for 3.6) (Issue #27598). Add FrozenSet to __all__ +Collection (only for 3.6) (issue #27598). Add FrozenSet to __all__ (upstream #261). Fix crash in _get_type_vars() (upstream #259). Remove the dict constraint in ForwardRef._eval_type (upstream #252). diff --git a/Misc/NEWS.d/3.6.0a4.rst b/Misc/NEWS.d/3.6.0a4.rst index 3abbdecb57038b..6f3f5262e5749d 100644 --- a/Misc/NEWS.d/3.6.0a4.rst +++ b/Misc/NEWS.d/3.6.0a4.rst @@ -177,7 +177,7 @@ Support keyword arguments to zlib.decompress(). Patch by Xiang Zhang. .. section: Library Prevent segfault after interpreter re-initialization due to ref count -problem introduced in code for Issue #27038 in 3.6.0a3. Patch by Xiang +problem introduced in code for issue #27038 in 3.6.0a3. Patch by Xiang Zhang. .. diff --git a/Misc/NEWS.d/3.6.0b1.rst b/Misc/NEWS.d/3.6.0b1.rst index bd54cf601d053b..1e2dcdd6c642bb 100644 --- a/Misc/NEWS.d/3.6.0b1.rst +++ b/Misc/NEWS.d/3.6.0b1.rst @@ -1137,7 +1137,7 @@ chunked transfer-encoding. .. 
section: Library A new version of typing.py from https://github.com/python/typing: - -Collection (only for 3.6) (Issue #27598) - Add FrozenSet to __all__ +Collection (only for 3.6) (issue #27598) - Add FrozenSet to __all__ (upstream #261) - fix crash in _get_type_vars() (upstream #259) - Remove the dict constraint in ForwardRef._eval_type (upstream #252) diff --git a/Misc/NEWS.d/next/Build/2024-12-16-16-16-35.gh-issue-127951.lpE13-.rst b/Misc/NEWS.d/next/Build/2024-12-16-16-16-35.gh-issue-127951.lpE13-.rst deleted file mode 100644 index 0c1df0e6bd7baa..00000000000000 --- a/Misc/NEWS.d/next/Build/2024-12-16-16-16-35.gh-issue-127951.lpE13-.rst +++ /dev/null @@ -1 +0,0 @@ -Add option ``--pystats`` to the Windows build to enable performance statistics collection. diff --git a/Misc/NEWS.d/next/Build/2024-12-20-09-03-22.gh-issue-128104.m_SoVx.rst b/Misc/NEWS.d/next/Build/2024-12-20-09-03-22.gh-issue-128104.m_SoVx.rst deleted file mode 100644 index c3a47fbecd1dad..00000000000000 --- a/Misc/NEWS.d/next/Build/2024-12-20-09-03-22.gh-issue-128104.m_SoVx.rst +++ /dev/null @@ -1,3 +0,0 @@ -Remove ``Py_STRFTIME_C99_SUPPORT`` conditions in favor of requiring C99 -:manpage:`strftime(3)` specifier support at build time. When cross-compiling, -there is no build time check and support is assumed. diff --git a/Misc/NEWS.d/next/Build/2024-12-21-09-56-37.gh-issue-100384.Ib-XrN.rst b/Misc/NEWS.d/next/Build/2024-12-21-09-56-37.gh-issue-100384.Ib-XrN.rst deleted file mode 100644 index 75c19fe3d8cef9..00000000000000 --- a/Misc/NEWS.d/next/Build/2024-12-21-09-56-37.gh-issue-100384.Ib-XrN.rst +++ /dev/null @@ -1,2 +0,0 @@ -Error on ``unguarded-availability`` in macOS builds, preventing invalid -use of symbols that are not available in older versions of the OS. diff --git a/Misc/NEWS.d/next/Build/2024-12-28-21-05-19.gh-issue-128321.0UvbXw.rst b/Misc/NEWS.d/next/Build/2024-12-28-21-05-19.gh-issue-128321.0UvbXw.rst deleted file mode 100644 index ed72cc8ab1449a..00000000000000 --- a/Misc/NEWS.d/next/Build/2024-12-28-21-05-19.gh-issue-128321.0UvbXw.rst +++ /dev/null @@ -1,3 +0,0 @@ -Set ``LIBS`` instead of ``LDFLAGS`` when checking if :mod:`sqlite3` library -functions are available. This fixes the ordering of linked libraries during -checks, which was incorrect when using a statically linked ``libsqlite3``. diff --git a/Misc/NEWS.d/next/Build/2025-01-02-11-02-45.gh-issue-123925.TLlyUi.rst b/Misc/NEWS.d/next/Build/2025-01-02-11-02-45.gh-issue-123925.TLlyUi.rst deleted file mode 100644 index a2a9c6fc7680b5..00000000000000 --- a/Misc/NEWS.d/next/Build/2025-01-02-11-02-45.gh-issue-123925.TLlyUi.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix building the :mod:`curses` module on platforms with libncurses but -without libncursesw. diff --git a/Misc/NEWS.d/next/Build/2025-01-02-12-50-46.gh-issue-115765.jko7Fg.rst b/Misc/NEWS.d/next/Build/2025-01-02-12-50-46.gh-issue-115765.jko7Fg.rst deleted file mode 100644 index 34618c2c1288bc..00000000000000 --- a/Misc/NEWS.d/next/Build/2025-01-02-12-50-46.gh-issue-115765.jko7Fg.rst +++ /dev/null @@ -1,2 +0,0 @@ -GNU Autoconf 2.72 is now required to generate :file:`!configure`. -Patch by Erlend Aasland. 
diff --git a/Misc/NEWS.d/next/Build/2025-01-16-03-35-37.gh-issue-128902.Dt7xtV.rst b/Misc/NEWS.d/next/Build/2025-01-16-03-35-37.gh-issue-128902.Dt7xtV.rst new file mode 100644 index 00000000000000..42ac492498e029 --- /dev/null +++ b/Misc/NEWS.d/next/Build/2025-01-16-03-35-37.gh-issue-128902.Dt7xtV.rst @@ -0,0 +1 @@ +Fix compile errors with Clang 9 and older due to lack of ``__attribute__((fallthrough))`` support. diff --git a/Misc/NEWS.d/next/C_API/2024-12-14-03-40-15.gh-issue-127925.FF7aov.rst b/Misc/NEWS.d/next/C_API/2024-12-14-03-40-15.gh-issue-127925.FF7aov.rst new file mode 100644 index 00000000000000..6cf5fd2872cd43 --- /dev/null +++ b/Misc/NEWS.d/next/C_API/2024-12-14-03-40-15.gh-issue-127925.FF7aov.rst @@ -0,0 +1,3 @@ +Convert the :mod:`decimal` module to use :pep:`757` C API (export-import +integers), offering some speed-up if the integer part of the +:class:`~decimal.Decimal` instance is small. Patch by Sergey B Kirpichev. diff --git a/Misc/NEWS.d/next/C_API/2024-12-16-21-59-06.gh-issue-128008.fa9Jt0.rst b/Misc/NEWS.d/next/C_API/2024-12-16-21-59-06.gh-issue-128008.fa9Jt0.rst deleted file mode 100644 index 2349eccac8fedc..00000000000000 --- a/Misc/NEWS.d/next/C_API/2024-12-16-21-59-06.gh-issue-128008.fa9Jt0.rst +++ /dev/null @@ -1,2 +0,0 @@ -Add :c:func:`PyWeakref_IsDead` function, which tests if a weak reference is -dead. diff --git a/Misc/NEWS.d/next/C_API/2025-01-01-03-25-38.gh-issue-126599.MRCYlH.rst b/Misc/NEWS.d/next/C_API/2025-01-01-03-25-38.gh-issue-126599.MRCYlH.rst new file mode 100644 index 00000000000000..8362ee3a2b1760 --- /dev/null +++ b/Misc/NEWS.d/next/C_API/2025-01-01-03-25-38.gh-issue-126599.MRCYlH.rst @@ -0,0 +1 @@ +Remove some internal test APIs for the experimental JIT compiler. diff --git a/Misc/NEWS.d/next/C_API/2025-01-15-11-42-07.gh-issue-128863.C9MkB_.rst b/Misc/NEWS.d/next/C_API/2025-01-15-11-42-07.gh-issue-128863.C9MkB_.rst new file mode 100644 index 00000000000000..b2e5664138fba2 --- /dev/null +++ b/Misc/NEWS.d/next/C_API/2025-01-15-11-42-07.gh-issue-128863.C9MkB_.rst @@ -0,0 +1,19 @@ +The following private functions are deprecated and planned for removal in +Python 3.18: + +* :c:func:`!_PyBytes_Join`: use :c:func:`PyBytes_Join`. +* :c:func:`!_PyDict_GetItemStringWithError`: use :c:func:`PyDict_GetItemStringRef`. +* :c:func:`!_PyDict_Pop()`: use :c:func:`PyDict_Pop`. +* :c:func:`!_PyLong_Sign()`: use :c:func:`PyLong_GetSign`. +* :c:func:`!_PyLong_FromDigits` and :c:func:`!_PyLong_New`: + use :c:func:`PyLongWriter_Create`. +* :c:func:`!_PyThreadState_UncheckedGet`: use :c:func:`PyThreadState_GetUnchecked`. +* :c:func:`!_PyUnicode_AsString`: use :c:func:`PyUnicode_AsUTF8`. +* :c:func:`!_Py_HashPointer`: use :c:func:`Py_HashPointer`. +* :c:func:`!_Py_fopen_obj`: use :c:func:`Py_fopen`. + +The `pythoncapi-compat project +`__ can be used to get these new +public functions on Python 3.13 and older. + +Patch by Victor Stinner. diff --git a/Misc/NEWS.d/next/C_API/2025-01-16-12-47-01.gh-issue-128911.mHVJ4x.rst b/Misc/NEWS.d/next/C_API/2025-01-16-12-47-01.gh-issue-128911.mHVJ4x.rst new file mode 100644 index 00000000000000..d32cd00cd5d605 --- /dev/null +++ b/Misc/NEWS.d/next/C_API/2025-01-16-12-47-01.gh-issue-128911.mHVJ4x.rst @@ -0,0 +1,3 @@ +Add :c:func:`PyImport_ImportModuleAttr` and :c:func:`PyImport_ImportModuleAttrString` +helper functions to import a module and get an attribute of the module. Patch +by Victor Stinner. 
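For context on the ``PyImport_ImportModuleAttrString`` entry above: the new helpers combine an import with an attribute lookup. A rough Python-level sketch of the equivalent behaviour (illustrative only; the real API is a C function, and ``import_module_attr`` here is a made-up name)::

   import importlib

   def import_module_attr(module_name, attr_name):
       # Import the module, then fetch one attribute from it, mirroring the
       # described C helper at the Python level.
       module = importlib.import_module(module_name)
       return getattr(module, attr_name)

   dumps = import_module_attr("json", "dumps")
   print(dumps({"answer": 42}))    # {"answer": 42}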
diff --git a/Misc/NEWS.d/next/C_API/2025-01-16-21-56-49.gh-issue-128844.ZPiJuo.rst b/Misc/NEWS.d/next/C_API/2025-01-16-21-56-49.gh-issue-128844.ZPiJuo.rst new file mode 100644 index 00000000000000..d9e1962631026a --- /dev/null +++ b/Misc/NEWS.d/next/C_API/2025-01-16-21-56-49.gh-issue-128844.ZPiJuo.rst @@ -0,0 +1,3 @@ +Add :c:func:`PyUnstable_TryIncRef` and :c:func:`PyUnstable_EnableTryIncRef` +unstable APIs. These are helpers for dealing with unowned references in +a thread-safe way, particularly in the free threading build. diff --git a/Misc/NEWS.d/next/C_API/2025-01-19-23-17-58.gh-issue-129033.cpRivP.rst b/Misc/NEWS.d/next/C_API/2025-01-19-23-17-58.gh-issue-129033.cpRivP.rst new file mode 100644 index 00000000000000..3cd19cc48e3416 --- /dev/null +++ b/Misc/NEWS.d/next/C_API/2025-01-19-23-17-58.gh-issue-129033.cpRivP.rst @@ -0,0 +1,3 @@ +Remove the private ``_Py_InitializeMain()`` function. It was a +:term:`provisional API` added to Python 3.8 by :pep:`587`. Patch by Victor +Stinner. diff --git a/Misc/NEWS.d/next/C_API/2025-01-20-10-40-11.gh-issue-129033.d1jltB.rst b/Misc/NEWS.d/next/C_API/2025-01-20-10-40-11.gh-issue-129033.d1jltB.rst new file mode 100644 index 00000000000000..c0c109d5ce1ca2 --- /dev/null +++ b/Misc/NEWS.d/next/C_API/2025-01-20-10-40-11.gh-issue-129033.d1jltB.rst @@ -0,0 +1,4 @@ +Remove ``_PyInterpreterState_GetConfigCopy()`` and +``_PyInterpreterState_SetConfig()`` private functions. Use instead +:c:func:`PyConfig_Get` and :c:func:`PyConfig_Set`, public C API added by +:pep:`741` "Python Configuration C API". Patch by Victor Stinner. diff --git a/Misc/NEWS.d/next/C_API/2025-01-22-09-28-04.gh-issue-128509.gqQ36L.rst b/Misc/NEWS.d/next/C_API/2025-01-22-09-28-04.gh-issue-128509.gqQ36L.rst new file mode 100644 index 00000000000000..c4a048fe3195d1 --- /dev/null +++ b/Misc/NEWS.d/next/C_API/2025-01-22-09-28-04.gh-issue-128509.gqQ36L.rst @@ -0,0 +1,2 @@ +Add :c:func:`PyUnstable_IsImmortal` for determining whether an object is +:term:`immortal`. diff --git a/Misc/NEWS.d/next/C_API/2025-01-28-13-21-17.gh-issue-91417.AfiR0t.rst b/Misc/NEWS.d/next/C_API/2025-01-28-13-21-17.gh-issue-91417.AfiR0t.rst new file mode 100644 index 00000000000000..e1017188b8d0ce --- /dev/null +++ b/Misc/NEWS.d/next/C_API/2025-01-28-13-21-17.gh-issue-91417.AfiR0t.rst @@ -0,0 +1,3 @@ +Remove :c:func:`PySequence_Fast` from the limited C API, since this function +has to be used with :c:macro:`PySequence_Fast_GET_ITEM` which never worked +in the limited C API. Patch by Victor Stinner. diff --git a/Misc/NEWS.d/next/C_API/2025-01-29-11-58-38.gh-issue-89188.BsfLr3.rst b/Misc/NEWS.d/next/C_API/2025-01-29-11-58-38.gh-issue-89188.BsfLr3.rst new file mode 100644 index 00000000000000..7ff225a7dc60c7 --- /dev/null +++ b/Misc/NEWS.d/next/C_API/2025-01-29-11-58-38.gh-issue-89188.BsfLr3.rst @@ -0,0 +1,3 @@ +Implement :c:func:`PyUnicode_KIND` and :c:func:`PyUnicode_DATA` as function, +in addition to the macros with the same names. The macros rely on C bit +fields which have compiler-specific layout. Patch by Victor Stinner. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2023-12-04-15-53-25.gh-issue-112713.Zrhv77.rst b/Misc/NEWS.d/next/Core_and_Builtins/2023-12-04-15-53-25.gh-issue-112713.Zrhv77.rst new file mode 100644 index 00000000000000..ee1f33f95647bd --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2023-12-04-15-53-25.gh-issue-112713.Zrhv77.rst @@ -0,0 +1 @@ +Added support for the ``Partitioned`` cookie flag in :mod:`http.cookies`. 
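To illustrate the ``Partitioned`` cookie flag added to :mod:`http.cookies` above, a minimal sketch; the exact attribute ordering and casing of the output may differ::

   from http.cookies import SimpleCookie

   cookie = SimpleCookie()
   cookie["session"] = "abc123"
   cookie["session"]["secure"] = True
   cookie["session"]["partitioned"] = True   # new flag
   print(cookie.output())
   # e.g. "Set-Cookie: session=abc123; Partitioned; Secure"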
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-02-29-16-55-52.gh-issue-115911.Vnkue_.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-02-29-16-55-52.gh-issue-115911.Vnkue_.rst new file mode 100644 index 00000000000000..717804be95b18b --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-02-29-16-55-52.gh-issue-115911.Vnkue_.rst @@ -0,0 +1,3 @@ +If the current working directory cannot be determined due to permissions, +then import will no longer raise :exc:`PermissionError`. Patch by Alex +Willmer. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-11-03-06-05-16.gh-issue-126349.7YwWsI.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-03-06-05-16.gh-issue-126349.7YwWsI.rst new file mode 100644 index 00000000000000..aecc8c9abf3ce9 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-03-06-05-16.gh-issue-126349.7YwWsI.rst @@ -0,0 +1,2 @@ +Add :func:`turtle.fill`, :func:`turtle.poly` and :func:`turtle.no_animation` context managers. +Patch by Marie Roald and Yngve Mardal Moe. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-11-18-12-17-45.gh-issue-125723.tW_hFG.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-18-12-17-45.gh-issue-125723.tW_hFG.rst new file mode 100644 index 00000000000000..62ca6f62f521a8 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-18-12-17-45.gh-issue-125723.tW_hFG.rst @@ -0,0 +1,2 @@ +Fix crash with ``gi_frame.f_locals`` when generator frames outlive their +generator. Patch by Mikhail Efimov. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-11-30-16-13-31.gh-issue-127349.ssYd6n.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-30-16-13-31.gh-issue-127349.ssYd6n.rst new file mode 100644 index 00000000000000..3c1586b6cbb8e7 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-30-16-13-31.gh-issue-127349.ssYd6n.rst @@ -0,0 +1,2 @@ +Fixed the error when resizing terminal in Python REPL. Patch by Semyon +Moroz. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-12-04-22-14-40.gh-issue-127119._hpyFE.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-12-04-22-14-40.gh-issue-127119._hpyFE.rst new file mode 100644 index 00000000000000..f021bd490f488c --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-12-04-22-14-40.gh-issue-127119._hpyFE.rst @@ -0,0 +1 @@ +Slightly optimize the :class:`int` deallocator. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-12-06-11-17-46.gh-issue-126004.-p8MAS.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-12-06-11-17-46.gh-issue-126004.-p8MAS.rst new file mode 100644 index 00000000000000..60b1c5d8b80793 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-12-06-11-17-46.gh-issue-126004.-p8MAS.rst @@ -0,0 +1,3 @@ +Fix handling of :attr:`UnicodeError.start` and :attr:`UnicodeError.end` +values in the :func:`codecs.xmlcharrefreplace_errors` error handler. +Patch by Bénédikt Tran. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-12-06-11-30-58.gh-issue-126004.-p8MAS.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-12-06-11-30-58.gh-issue-126004.-p8MAS.rst new file mode 100644 index 00000000000000..619d73042a9bb8 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-12-06-11-30-58.gh-issue-126004.-p8MAS.rst @@ -0,0 +1,3 @@ +Fix handling of :attr:`UnicodeError.start` and :attr:`UnicodeError.end` +values in the :func:`codecs.backslashreplace_errors` error handler. Patch by +Bénédikt Tran. 
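The :func:`turtle.fill` context manager mentioned above pairs ``begin_fill()`` and ``end_fill()`` automatically. A minimal sketch (requires a Tk display; module-level usage assumed)::

   from turtle import done, fill, fillcolor, forward, left

   fillcolor("red")
   with fill():                 # replaces explicit begin_fill()/end_fill()
       for _ in range(4):
           forward(100)
           left(90)
   done()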
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-12-06-11-32-58.gh-issue-126004.CYAwTB.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-12-06-11-32-58.gh-issue-126004.CYAwTB.rst new file mode 100644 index 00000000000000..de70c59ee48eec --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-12-06-11-32-58.gh-issue-126004.CYAwTB.rst @@ -0,0 +1,3 @@ +Fix handling of :attr:`UnicodeError.start` and :attr:`UnicodeError.end` +values in the :func:`codecs.replace_errors` error handler. Patch by Bénédikt +Tran. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-12-13-14-17-24.gh-issue-127903.vemHSl.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-12-13-14-17-24.gh-issue-127903.vemHSl.rst deleted file mode 100644 index ad479b52d1675c..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-12-13-14-17-24.gh-issue-127903.vemHSl.rst +++ /dev/null @@ -1,2 +0,0 @@ -``Objects/unicodeobject.c``: fix a crash on DEBUG builds in ``_copy_characters`` -when there is nothing to copy. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-12-17-09-28-17.gh-issue-128016.DPqhah.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-12-17-09-28-17.gh-issue-128016.DPqhah.rst new file mode 100644 index 00000000000000..0832d777bc3251 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-12-17-09-28-17.gh-issue-128016.DPqhah.rst @@ -0,0 +1 @@ +Improved the ``SyntaxWarning`` message for invalid escape sequences to clarify that such sequences will raise a ``SyntaxError`` in future Python releases. The new message also suggests a potential fix, i.e., ``Did you mean "\\e"?``. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-12-17-13-45-33.gh-issue-127274.deNxNC.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-12-17-13-45-33.gh-issue-127274.deNxNC.rst deleted file mode 100644 index a4608fbbbf19ec..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-12-17-13-45-33.gh-issue-127274.deNxNC.rst +++ /dev/null @@ -1,3 +0,0 @@ -Add a new flag, ``CO_METHOD``, to :attr:`~codeobject.co_flags` that -indicates whether the code object belongs to a function defined in class -scope. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-12-17-18-20-37.gh-issue-128035.JwqHdB.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-12-17-18-20-37.gh-issue-128035.JwqHdB.rst deleted file mode 100644 index 27815d48425334..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-12-17-18-20-37.gh-issue-128035.JwqHdB.rst +++ /dev/null @@ -1 +0,0 @@ -Indicate through :data:`ssl.HAS_PHA` whether the :mod:`ssl` module supports TLSv1.3 post-handshake client authentication (PHA). Patch by Will Childs-Klein. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-12-17-22-28-15.gh-issue-128030.H1ptOD.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-12-17-22-28-15.gh-issue-128030.H1ptOD.rst deleted file mode 100644 index 93d78632355b76..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-12-17-22-28-15.gh-issue-128030.H1ptOD.rst +++ /dev/null @@ -1 +0,0 @@ -Avoid error from calling ``PyModule_GetFilenameObject`` on a non-module object when importing a non-existent symbol from a non-module object. 
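To see the improved ``SyntaxWarning`` described above, compile source containing an invalid escape sequence; a small sketch, with the exact wording of the message left unspecified::

   import warnings

   warnings.simplefilter("always", SyntaxWarning)
   # The compiled source is: pattern = "\d"
   # "\d" is not a valid escape sequence, so CPython emits a SyntaxWarning
   # that now suggests "\\d" (or a raw string) as the fix.
   compile('pattern = "\\d"', "<example>", "exec")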
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-12-18-14-22-48.gh-issue-128079.SUD5le.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-12-18-14-22-48.gh-issue-128079.SUD5le.rst deleted file mode 100644 index 8da4e677f068a3..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-12-18-14-22-48.gh-issue-128079.SUD5le.rst +++ /dev/null @@ -1,5 +0,0 @@ -Fix a bug where :keyword:`except* ` does not properly check the -return value of an :exc:`ExceptionGroup`'s :meth:`~BaseExceptionGroup.split` -function, leading to a crash in some cases. Now when :meth:`~BaseExceptionGroup.split` -returns an invalid object, :keyword:`except* ` raises a :exc:`TypeError` -with the original raised :exc:`ExceptionGroup` object chained to it. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-12-20-12-25-16.gh-issue-127705.WmCz1z.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-12-20-12-25-16.gh-issue-127705.WmCz1z.rst deleted file mode 100644 index fde12b78ce0444..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-12-20-12-25-16.gh-issue-127705.WmCz1z.rst +++ /dev/null @@ -1,4 +0,0 @@ -Adds stackref debugging when ``Py_STACKREF_DEBUG`` is set. Finds all -double-closes and leaks, logging the origin and last borrow. - -Inspired by HPy's debug mode. https://docs.hpyproject.org/en/latest/debug-mode.html diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-12-20-23-07-33.gh-issue-114203.84NgoW.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-12-20-23-07-33.gh-issue-114203.84NgoW.rst deleted file mode 100644 index 6a9856e90c32bc..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-12-20-23-07-33.gh-issue-114203.84NgoW.rst +++ /dev/null @@ -1 +0,0 @@ -Optimize ``Py_BEGIN_CRITICAL_SECTION`` for simple recursive calls. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-12-22-15-47-44.gh-issue-126868.RpjKez.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-12-22-15-47-44.gh-issue-126868.RpjKez.rst deleted file mode 100644 index ede383deb4ad31..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-12-22-15-47-44.gh-issue-126868.RpjKez.rst +++ /dev/null @@ -1 +0,0 @@ -Increase usage of freelist for :class:`int` allocation. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-12-23-11-14-07.gh-issue-128192.02mEhD.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-12-23-11-14-07.gh-issue-128192.02mEhD.rst deleted file mode 100644 index b80ab715ffc7db..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-12-23-11-14-07.gh-issue-128192.02mEhD.rst +++ /dev/null @@ -1,2 +0,0 @@ -Upgrade HTTP digest authentication algorithm for :mod:`urllib.request` by -supporting SHA-256 digest authentication as specified in :rfc:`7616`. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-12-24-01-40-12.gh-issue-128137.gsTwr_.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-12-24-01-40-12.gh-issue-128137.gsTwr_.rst deleted file mode 100644 index a3b7cde7f67676..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-12-24-01-40-12.gh-issue-128137.gsTwr_.rst +++ /dev/null @@ -1,2 +0,0 @@ -Update :c:type:`PyASCIIObject` layout to handle interned field with the -atomic operation. Patch by Donghee Na. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-12-29-15-09-21.gh-issue-128330.IaYL7G.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-12-29-15-09-21.gh-issue-128330.IaYL7G.rst new file mode 100644 index 00000000000000..8fe628d18f4bd4 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-12-29-15-09-21.gh-issue-128330.IaYL7G.rst @@ -0,0 +1 @@ +Restore terminal control characters on REPL exit. 
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-12-30-15-49-31.gh-issue-127953.B4_6L9.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-12-30-15-49-31.gh-issue-127953.B4_6L9.rst new file mode 100644 index 00000000000000..f19afcd90b16ea --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-12-30-15-49-31.gh-issue-127953.B4_6L9.rst @@ -0,0 +1,2 @@ +The time to handle a ``LINE`` event in sys.monitoring (and sys.settrace) is +now independent of the number of lines in the code object. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-01-07-19-26-40.gh-issue-126703.9i-S5t.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-07-19-26-40.gh-issue-126703.9i-S5t.rst new file mode 100644 index 00000000000000..dcd5f449c98ef3 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-07-19-26-40.gh-issue-126703.9i-S5t.rst @@ -0,0 +1 @@ +Improve performance of iterating over lists and tuples by using a freelist for the iterator objects. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-01-10-23-54-16.gh-issue-100239.ijOOUs.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-10-23-54-16.gh-issue-100239.ijOOUs.rst new file mode 100644 index 00000000000000..f58c1fc767515e --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-10-23-54-16.gh-issue-100239.ijOOUs.rst @@ -0,0 +1,2 @@ +Add opcode ``BINARY_OP_EXTEND`` which executes a pair of functions (guard and +specialization functions) accessed from the inline cache. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-01-13-17-03-49.gh-issue-128807.BGGBxD.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-13-17-03-49.gh-issue-128807.BGGBxD.rst new file mode 100644 index 00000000000000..34952e9abb66e5 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-13-17-03-49.gh-issue-128807.BGGBxD.rst @@ -0,0 +1,6 @@ +Add a marking phase to the free-threaded GC. This is similar to what was +done in GH-126491. Since the free-threaded GC does not have generations and +is not incremental, the marking phase looks for all objects reachable from +known roots. The roots are objects known to not be garbage, like the module +dictionary for :mod:`sys`. For most programs, this marking phase should +make the GC a bit faster since typically less work is done per object. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-01-16-18-16-18.gh-issue-128910.9pqfab.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-16-18-16-18.gh-issue-128910.9pqfab.rst new file mode 100644 index 00000000000000..e095ba9ebf6be4 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-16-18-16-18.gh-issue-128910.9pqfab.rst @@ -0,0 +1,2 @@ +Undocumented and unused private C-API functions ``_PyTrash_begin`` and +``_PyTrash_end`` are removed. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-01-16-22-54-12.gh-issue-100239.7_HpBU.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-16-22-54-12.gh-issue-100239.7_HpBU.rst new file mode 100644 index 00000000000000..6f086b7ecc0036 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-16-22-54-12.gh-issue-100239.7_HpBU.rst @@ -0,0 +1 @@ +Specialize ``BINARY_OP`` for bitwise logical operations on compact ints. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-01-17-13-16-14.gh-issue-128842.OMs5X6.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-17-13-16-14.gh-issue-128842.OMs5X6.rst new file mode 100644 index 00000000000000..9898060076db79 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-17-13-16-14.gh-issue-128842.OMs5X6.rst @@ -0,0 +1 @@ +Collect JIT memory stats using pystats. Patch by Diego Russo. 
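The ``LINE`` event change above concerns :mod:`sys.monitoring`. For reference, a minimal sketch of registering a ``LINE`` callback whose per-event cost the change addresses (illustrative only)::

   import sys

   mon = sys.monitoring
   TOOL = mon.PROFILER_ID
   mon.use_tool_id(TOOL, "line-counter")

   hits = 0
   def on_line(code, line_number):
       # Called once per traced source line executed.
       global hits
       hits += 1

   mon.register_callback(TOOL, mon.events.LINE, on_line)
   mon.set_events(TOOL, mon.events.LINE)
   total = sum(i * i for i in range(1000))   # some traced work
   mon.set_events(TOOL, mon.events.NO_EVENTS)
   mon.free_tool_id(TOOL)
   print(hits, total)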
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-01-18-01-06-58.gh-issue-128799.vSNagk.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-18-01-06-58.gh-issue-128799.vSNagk.rst new file mode 100644 index 00000000000000..eb2361bb5d4525 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-18-01-06-58.gh-issue-128799.vSNagk.rst @@ -0,0 +1 @@ +Add frame of ``except*`` to traceback when it wraps a naked exception. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-01-21-19-48-30.gh-issue-124363.vOFhHW.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-21-19-48-30.gh-issue-124363.vOFhHW.rst new file mode 100644 index 00000000000000..553aa5a4dd573e --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-21-19-48-30.gh-issue-124363.vOFhHW.rst @@ -0,0 +1 @@ +Treat debug expressions in f-string as raw strings. Patch by Pablo Galindo diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-01-21-23-35-41.gh-issue-129093.0rvETC.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-21-23-35-41.gh-issue-129093.0rvETC.rst new file mode 100644 index 00000000000000..067d52eff2da1e --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-21-23-35-41.gh-issue-129093.0rvETC.rst @@ -0,0 +1,2 @@ +Fix f-strings such as ``f'{expr=}'`` sometimes not displaying the full +expression when the expression contains ``!=``. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-01-22-14-24-44.gh-issue-129149.wAYu43.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-22-14-24-44.gh-issue-129149.wAYu43.rst new file mode 100644 index 00000000000000..d946f6a2fea185 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-22-14-24-44.gh-issue-129149.wAYu43.rst @@ -0,0 +1,2 @@ +Add fast path for medium-size integers in :c:func:`PyLong_FromUnsignedLong`, +:c:func:`PyLong_FromUnsignedLongLong` and :c:func:`PyLong_FromSize_t`. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-01-24-11-37-22.gh-issue-129231.ZsAP9v.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-24-11-37-22.gh-issue-129231.ZsAP9v.rst new file mode 100644 index 00000000000000..b30492a1947058 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-24-11-37-22.gh-issue-129231.ZsAP9v.rst @@ -0,0 +1 @@ +Improve memory layout of JIT traces. Patch by Diego Russo diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-01-28-06-23-59.gh-issue-129345.uOjkML.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-28-06-23-59.gh-issue-129345.uOjkML.rst new file mode 100644 index 00000000000000..68e1103db45652 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-28-06-23-59.gh-issue-129345.uOjkML.rst @@ -0,0 +1 @@ +Fix null pointer dereference in :func:`syslog.openlog` when an audit hook raises an exception. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-01-28-10-26-04.gh-issue-129393.0eICq6.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-28-10-26-04.gh-issue-129393.0eICq6.rst new file mode 100644 index 00000000000000..e36e6f565efd81 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-28-10-26-04.gh-issue-129393.0eICq6.rst @@ -0,0 +1,2 @@ +On FreeBSD, :data:`sys.platform` doesn't contain the major version anymore. +It is always ``'freebsd'``, instead of ``'freebsd13'`` or ``'freebsd14'``. 
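Because :data:`sys.platform` no longer embeds the FreeBSD major version (see the entry above), prefix checks keep working across old and new releases::

   import sys

   # Matches 'freebsd', 'freebsd13', 'freebsd14', ..., so it behaves the
   # same before and after this change.
   if sys.platform.startswith("freebsd"):
       print("running on FreeBSD")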
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-01-28-11-13-41.gh-issue-128563.xElppE.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-28-11-13-41.gh-issue-128563.xElppE.rst new file mode 100644 index 00000000000000..dfd932e836bda6 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-28-11-13-41.gh-issue-128563.xElppE.rst @@ -0,0 +1,2 @@ +Fix an issue where the "lltrace" debug feature could have been incorrectly +enabled for some frames. diff --git a/Misc/NEWS.d/next/Documentation/2025-01-14-11-06-41.gh-issue-67206.LYKmi5.rst b/Misc/NEWS.d/next/Documentation/2025-01-14-11-06-41.gh-issue-67206.LYKmi5.rst new file mode 100644 index 00000000000000..11fb617e98edc3 --- /dev/null +++ b/Misc/NEWS.d/next/Documentation/2025-01-14-11-06-41.gh-issue-67206.LYKmi5.rst @@ -0,0 +1,3 @@ +Document that :const:`string.printable` is not printable in the POSIX sense. +In particular, :meth:`string.printable.isprintable() ` returns +:const:`False`. Patch by Bénédikt Tran. diff --git a/Misc/NEWS.d/next/Documentation/2025-01-16-18-59-11.gh-issue-125722.eHHRga.rst b/Misc/NEWS.d/next/Documentation/2025-01-16-18-59-11.gh-issue-125722.eHHRga.rst new file mode 100644 index 00000000000000..bf6253eed2eb90 --- /dev/null +++ b/Misc/NEWS.d/next/Documentation/2025-01-16-18-59-11.gh-issue-125722.eHHRga.rst @@ -0,0 +1,2 @@ +Require Sphinx 8.1.3 or later to build the Python documentation. Patch by +Adam Turner. diff --git a/Misc/NEWS.d/next/Library/2020-08-07-16-55-57.bpo-27307.Xqzzda.rst b/Misc/NEWS.d/next/Library/2020-08-07-16-55-57.bpo-27307.Xqzzda.rst new file mode 100644 index 00000000000000..6e7a856d994cb6 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-08-07-16-55-57.bpo-27307.Xqzzda.rst @@ -0,0 +1 @@ +Add attribute and item access support to :class:`string.Formatter` in auto-numbering mode, which allows format strings like '{.name}' and '{[1]}'. diff --git a/Misc/NEWS.d/next/Library/2022-05-23-21-23-29.gh-issue-81340.D11RkZ.rst b/Misc/NEWS.d/next/Library/2022-05-23-21-23-29.gh-issue-81340.D11RkZ.rst new file mode 100644 index 00000000000000..49e6305bf83138 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2022-05-23-21-23-29.gh-issue-81340.D11RkZ.rst @@ -0,0 +1,5 @@ +Use :func:`os.copy_file_range` in :func:`shutil.copy`, :func:`shutil.copy2`, +and :func:`shutil.copyfile` functions by default. An underlying Linux system +call gives filesystems an opportunity to implement the use of copy-on-write +(in case of btrfs and XFS) or server-side copy (in the case of NFS.) +Patch by Illia Volochii. diff --git a/Misc/NEWS.d/next/Library/2022-07-28-12-32-59.gh-issue-95371.F24IFC.rst b/Misc/NEWS.d/next/Library/2022-07-28-12-32-59.gh-issue-95371.F24IFC.rst deleted file mode 100644 index 4a62aaed78b425..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-07-28-12-32-59.gh-issue-95371.F24IFC.rst +++ /dev/null @@ -1 +0,0 @@ -Added support for other image formats (PNG, PGM, and PPM) to the turtle module. Patch by Shin-myoung-serp. diff --git a/Misc/NEWS.d/next/Library/2023-02-01-16-41-31.gh-issue-101410.Dt2aQE.rst b/Misc/NEWS.d/next/Library/2023-02-01-16-41-31.gh-issue-101410.Dt2aQE.rst new file mode 100644 index 00000000000000..31493686daec97 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-02-01-16-41-31.gh-issue-101410.Dt2aQE.rst @@ -0,0 +1,3 @@ +Support custom messages for domain errors in the :mod:`math` module +(:func:`math.sqrt`, :func:`math.log` and :func:`math.atanh` were modified as +examples). Patch by Charlie Zhao and Sergey B Kirpichev. 
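A small sketch of the :class:`string.Formatter` entry above: attribute and item access now combine with auto-numbered fields, matching what ``str.format`` already allowed (output shown in the comment is indicative)::

   from string import Formatter

   fmt = Formatter()
   # '{.real}' auto-numbers to the first argument, '{[1]}' to the second.
   print(fmt.format("{.real} and {[1]}", 3 + 4j, ["a", "b"]))   # "3.0 and b"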
diff --git a/Misc/NEWS.d/next/Library/2023-11-12-21-53-40.gh-issue-112015.2WPRxE.rst b/Misc/NEWS.d/next/Library/2023-11-12-21-53-40.gh-issue-112015.2WPRxE.rst deleted file mode 100644 index 4b58ec9d219eff..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-11-12-21-53-40.gh-issue-112015.2WPRxE.rst +++ /dev/null @@ -1,5 +0,0 @@ -:func:`ctypes.memoryview_at` now exists to create a -:class:`memoryview` object that refers to the supplied pointer and -length. This works like :func:`ctypes.string_at` except it avoids a -buffer copy, and is typically useful when implementing pure Python -callback functions that are passed dynamically-sized buffers. diff --git a/Misc/NEWS.d/next/Library/2024-07-13-13-25-31.gh-issue-121676.KDLS11.rst b/Misc/NEWS.d/next/Library/2024-07-13-13-25-31.gh-issue-121676.KDLS11.rst deleted file mode 100644 index be589b727a1968..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-07-13-13-25-31.gh-issue-121676.KDLS11.rst +++ /dev/null @@ -1,3 +0,0 @@ -Deprecate calling the Python implementation of :meth:`functools.reduce` -with a ``function`` or ``sequence`` as a :term:`keyword argument`. -This will be forbidden in Python 3.16 in order to match the C implementation. diff --git a/Misc/NEWS.d/next/Library/2024-07-14-23-19-20.gh-issue-119257.9OEzcN.rst b/Misc/NEWS.d/next/Library/2024-07-14-23-19-20.gh-issue-119257.9OEzcN.rst new file mode 100644 index 00000000000000..8f3f863d93e021 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-07-14-23-19-20.gh-issue-119257.9OEzcN.rst @@ -0,0 +1,2 @@ +Show tab completions menu below the current line, which results in less +janky behaviour, and fixes a cursor movement bug. Patch by Daniel Hollas diff --git a/Misc/NEWS.d/next/Library/2024-08-28-16-10-37.gh-issue-123424.u96_i6.rst b/Misc/NEWS.d/next/Library/2024-08-28-16-10-37.gh-issue-123424.u96_i6.rst deleted file mode 100644 index 4df4bbf2ba2b73..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-08-28-16-10-37.gh-issue-123424.u96_i6.rst +++ /dev/null @@ -1 +0,0 @@ -Add :meth:`zipfile.ZipInfo._for_archive` setting default properties on :class:`~zipfile.ZipInfo` objects. Patch by Bénédikt Tran and Jason R. Coombs. diff --git a/Misc/NEWS.d/next/Library/2024-09-04-14-13-14.gh-issue-121720.z9hhXQ.rst b/Misc/NEWS.d/next/Library/2024-09-04-14-13-14.gh-issue-121720.z9hhXQ.rst deleted file mode 100644 index 96da94a9f211af..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-09-04-14-13-14.gh-issue-121720.z9hhXQ.rst +++ /dev/null @@ -1 +0,0 @@ -:class:`enum.EnumDict` can now be used without resorting to private API. diff --git a/Misc/NEWS.d/next/Library/2024-09-12-14-24-25.gh-issue-123987.7_OD1p.rst b/Misc/NEWS.d/next/Library/2024-09-12-14-24-25.gh-issue-123987.7_OD1p.rst new file mode 100644 index 00000000000000..b110900e7efd33 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-09-12-14-24-25.gh-issue-123987.7_OD1p.rst @@ -0,0 +1,3 @@ +Fixed issue in NamespaceReader where a non-path item in a namespace path, +such as a sentinel added by an editable installer, would break resource +loading. diff --git a/Misc/NEWS.d/next/Library/2024-09-27-19-21-53.gh-issue-124703.lYTLEv.rst b/Misc/NEWS.d/next/Library/2024-09-27-19-21-53.gh-issue-124703.lYTLEv.rst new file mode 100644 index 00000000000000..e55d3539355d73 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-09-27-19-21-53.gh-issue-124703.lYTLEv.rst @@ -0,0 +1 @@ +Quitting :mod:`pdb` in ``inline`` mode will emit a confirmation prompt and exit gracefully now, instead of printing an exception traceback. 
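For the ``ctypes.memoryview_at`` entry consolidated above, a minimal sketch of the zero-copy view it provides; treat the exact call details as indicative::

   import ctypes

   buf = ctypes.create_string_buffer(b"hello world")
   # Unlike ctypes.string_at(), no copy of the underlying bytes is made.
   view = ctypes.memoryview_at(ctypes.addressof(buf), 5)
   print(bytes(view))   # b'hello'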
diff --git a/Misc/NEWS.d/next/Library/2024-10-02-11-17-23.gh-issue-91048.QWY-b1.rst b/Misc/NEWS.d/next/Library/2024-10-02-11-17-23.gh-issue-91048.QWY-b1.rst new file mode 100644 index 00000000000000..c2faf470ffc9cf --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-10-02-11-17-23.gh-issue-91048.QWY-b1.rst @@ -0,0 +1,2 @@ +Add :func:`asyncio.capture_call_graph` and +:func:`asyncio.print_call_graph` functions. diff --git a/Misc/NEWS.d/next/Library/2024-10-04-09-56-45.gh-issue-124761.N4pSD6.rst b/Misc/NEWS.d/next/Library/2024-10-04-09-56-45.gh-issue-124761.N4pSD6.rst deleted file mode 100644 index 797dd31b368548..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-04-09-56-45.gh-issue-124761.N4pSD6.rst +++ /dev/null @@ -1 +0,0 @@ -Add :data:`~socket.SO_REUSEPORT_LB` constant to :mod:`socket` for FreeBSD. diff --git a/Misc/NEWS.d/next/Library/2024-10-26-16-59-02.gh-issue-125553.4pDLzt.rst b/Misc/NEWS.d/next/Library/2024-10-26-16-59-02.gh-issue-125553.4pDLzt.rst new file mode 100644 index 00000000000000..291c5e6f6f2181 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-10-26-16-59-02.gh-issue-125553.4pDLzt.rst @@ -0,0 +1,2 @@ +Fix round-trip invariance for backslash continuations in +:func:`tokenize.untokenize`. diff --git a/Misc/NEWS.d/next/Library/2024-10-31-14-31-36.gh-issue-126225.vTxGXm.rst b/Misc/NEWS.d/next/Library/2024-10-31-14-31-36.gh-issue-126225.vTxGXm.rst deleted file mode 100644 index 13a1f213c7a58e..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-31-14-31-36.gh-issue-126225.vTxGXm.rst +++ /dev/null @@ -1,6 +0,0 @@ -:mod:`getopt` and :mod:`optparse` are no longer marked as deprecated. -There are legitimate reasons to use one of these modules in preference to -:mod:`argparse`, and none of these modules are at risk of being removed -from the standard library. Of the three, ``argparse`` remains the -recommended default choice, *unless* one of the concerns noted at the top of -the ``optparse`` module documentation applies. diff --git a/Misc/NEWS.d/next/Library/2024-11-09-15-59-51.gh-issue-126624.bN53Va.rst b/Misc/NEWS.d/next/Library/2024-11-09-15-59-51.gh-issue-126624.bN53Va.rst deleted file mode 100644 index 468840a651c253..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-11-09-15-59-51.gh-issue-126624.bN53Va.rst +++ /dev/null @@ -1,2 +0,0 @@ -Expose error code :data:`~xml.parsers.expat.errors.XML_ERROR_NOT_STARTED` -of Expat >=2.6.4 in :mod:`xml.parsers.expat.errors`. diff --git a/Misc/NEWS.d/next/Library/2024-11-10-19-45-01.gh-issue-126332.WCCKoH.rst b/Misc/NEWS.d/next/Library/2024-11-10-19-45-01.gh-issue-126332.WCCKoH.rst new file mode 100644 index 00000000000000..9277797ddc745e --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-10-19-45-01.gh-issue-126332.WCCKoH.rst @@ -0,0 +1 @@ +Fix _pyrepl crash when entering a double CTRL-Z on an overflowing line. diff --git a/Misc/NEWS.d/next/Library/2024-11-11-07-56-03.gh-issue-126639.AmVSt-.rst b/Misc/NEWS.d/next/Library/2024-11-11-07-56-03.gh-issue-126639.AmVSt-.rst deleted file mode 100644 index 0b75e5858de731..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-11-11-07-56-03.gh-issue-126639.AmVSt-.rst +++ /dev/null @@ -1 +0,0 @@ -:class:`tempfile.NamedTemporaryFile` will now issue a :exc:`ResourceWarning` when it is finalized by the garbage collector without being explicitly closed. 
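The :class:`tempfile.NamedTemporaryFile` entry above only warns when the file is left to the garbage collector; closing it explicitly, for example via a ``with`` block, avoids the new :exc:`ResourceWarning`::

   import tempfile

   with tempfile.NamedTemporaryFile() as tmp:   # closed on exit, no warning
       tmp.write(b"scratch data")
       tmp.flush()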
diff --git a/Misc/NEWS.d/next/Library/2024-11-19-10-46-57.gh-issue-124130.OZ_vR5.rst b/Misc/NEWS.d/next/Library/2024-11-19-10-46-57.gh-issue-124130.OZ_vR5.rst deleted file mode 100644 index a1d4fc8ff4c22f..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-11-19-10-46-57.gh-issue-124130.OZ_vR5.rst +++ /dev/null @@ -1,4 +0,0 @@ -Fix a bug in matching regular expression ``\B`` in empty input string. -Now it is always the opposite of ``\b``. -To get an old behavior, use ``(?!\A\Z)\B``. -To get a new behavior in old Python versions, use ``(?!\b)``. diff --git a/Misc/NEWS.d/next/Library/2024-11-24-22-06-42.gh-issue-127096.R7LLpQ.rst b/Misc/NEWS.d/next/Library/2024-11-24-22-06-42.gh-issue-127096.R7LLpQ.rst new file mode 100644 index 00000000000000..8619296143b7d7 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-24-22-06-42.gh-issue-127096.R7LLpQ.rst @@ -0,0 +1,2 @@ +Do not recreate unnamed section on every read in +:class:`configparser.ConfigParser`. Patch by Andrey Efremov. diff --git a/Misc/NEWS.d/next/Library/2024-12-02-19-13-19.gh-issue-127529.Pj1Xtf.rst b/Misc/NEWS.d/next/Library/2024-12-02-19-13-19.gh-issue-127529.Pj1Xtf.rst deleted file mode 100644 index 26f2fd5923ab7b..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-12-02-19-13-19.gh-issue-127529.Pj1Xtf.rst +++ /dev/null @@ -1,4 +0,0 @@ -Correct behavior of -:func:`!asyncio.selector_events.BaseSelectorEventLoop._accept_connection` -in handling :exc:`ConnectionAbortedError` in a loop. This improves -performance on OpenBSD. diff --git a/Misc/NEWS.d/next/Library/2024-12-03-14-45-16.gh-issue-98188.GX9i2b.rst b/Misc/NEWS.d/next/Library/2024-12-03-14-45-16.gh-issue-98188.GX9i2b.rst deleted file mode 100644 index 30ab8cfc3f0bc6..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-12-03-14-45-16.gh-issue-98188.GX9i2b.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fix an issue in :meth:`email.message.Message.get_payload` where data -cannot be decoded if the Content Transfer Encoding mechanism contains -trailing whitespaces or additional junk text. Patch by Hui Liu. diff --git a/Misc/NEWS.d/next/Library/2024-12-03-20-28-08.gh-issue-127586.zgotYF.rst b/Misc/NEWS.d/next/Library/2024-12-03-20-28-08.gh-issue-127586.zgotYF.rst deleted file mode 100644 index 80217bd4a10503..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-12-03-20-28-08.gh-issue-127586.zgotYF.rst +++ /dev/null @@ -1,3 +0,0 @@ -:class:`multiprocessing.pool.Pool` now properly restores blocked signal handlers -of the parent thread when creating processes via either *spawn* or -*forkserver*. diff --git a/Misc/NEWS.d/next/Library/2024-12-04-10-39-29.gh-issue-83662.CG1s3m.rst b/Misc/NEWS.d/next/Library/2024-12-04-10-39-29.gh-issue-83662.CG1s3m.rst deleted file mode 100644 index 5e39933047993c..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-12-04-10-39-29.gh-issue-83662.CG1s3m.rst +++ /dev/null @@ -1,5 +0,0 @@ -Add missing ``__class_getitem__`` method to the Python implementation of -:func:`functools.partial`, to make it compatible with the C version. This is -mainly relevant for alternative Python implementations like PyPy and -GraalPy, because CPython will usually use the C-implementation of that -function. 
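For the ``functools.partial`` ``__class_getitem__`` entry above: with the fix, subscripting behaves the same under the pure-Python implementation as with the C one. A small sketch::

   from functools import partial

   # partial is subscriptable, e.g. for type annotations:
   MakeInt = partial[int]
   print(MakeInt)            # functools.partial[int]

   to_int = partial(int, base=2)
   print(to_int("1010"))     # 10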
diff --git a/Misc/NEWS.d/next/Library/2024-12-06-21-03-11.gh-issue-127688.NJqtc-.rst b/Misc/NEWS.d/next/Library/2024-12-06-21-03-11.gh-issue-127688.NJqtc-.rst deleted file mode 100644 index a22b136da72faf..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-12-06-21-03-11.gh-issue-127688.NJqtc-.rst +++ /dev/null @@ -1,2 +0,0 @@ -Add the :data:`~os.SCHED_DEADLINE` and :data:`~os.SCHED_NORMAL` constants -to the :mod:`os` module. diff --git a/Misc/NEWS.d/next/Library/2024-12-12-07-27-51.gh-issue-127847.ksfNKM.rst b/Misc/NEWS.d/next/Library/2024-12-12-07-27-51.gh-issue-127847.ksfNKM.rst deleted file mode 100644 index 3d6e36fb538bca..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-12-12-07-27-51.gh-issue-127847.ksfNKM.rst +++ /dev/null @@ -1 +0,0 @@ -Fix the position when doing interleaved seeks and reads in uncompressed, unencrypted zip files returned by :meth:`zipfile.ZipFile.open`. diff --git a/Misc/NEWS.d/next/Library/2024-12-12-18-25-50.gh-issue-127873.WJRwfz.rst b/Misc/NEWS.d/next/Library/2024-12-12-18-25-50.gh-issue-127873.WJRwfz.rst new file mode 100644 index 00000000000000..d7575c7efb6e88 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-12-12-18-25-50.gh-issue-127873.WJRwfz.rst @@ -0,0 +1,3 @@ +When ``-E`` is set, only ignore ``PYTHON_COLORS`` and not +``FORCE_COLOR``/``NO_COLOR``/``TERM`` when colourising output. +Patch by Hugo van Kemenade. diff --git a/Misc/NEWS.d/next/Library/2024-12-13-14-21-04.gh-issue-122548.hq3Vud.rst b/Misc/NEWS.d/next/Library/2024-12-13-14-21-04.gh-issue-122548.hq3Vud.rst deleted file mode 100644 index 6cd13572ff1893..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-12-13-14-21-04.gh-issue-122548.hq3Vud.rst +++ /dev/null @@ -1,4 +0,0 @@ -Adds two new local events to sys.monitoring, ``BRANCH_LEFT`` and -``BRANCH_RIGHT``. This allows the two arms of the branch to be disabled -independently, which should hugely improve performance of branch-level -coverage tools. The old branch event, ``BRANCH`` is now deprecated. diff --git a/Misc/NEWS.d/next/Library/2024-12-16-22-20-38.gh-issue-121604.m3Xn4G.rst b/Misc/NEWS.d/next/Library/2024-12-16-22-20-38.gh-issue-121604.m3Xn4G.rst new file mode 100644 index 00000000000000..9a6fce8647cc6b --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-12-16-22-20-38.gh-issue-121604.m3Xn4G.rst @@ -0,0 +1 @@ +Add missing Deprecation warnings for :attr:`importlib.machinery.DEBUG_BYTECODE_SUFFIXES`, :attr:`importlib.machinery.OPTIMIZED_BYTECODE_SUFFIXES`, :class:`importlib.machinery.WindowsRegistryFinder`, :class:`importlib.abc.ResourceLoader`, :meth:`importlib.abc.SourceLoader.path_mtime`. diff --git a/Misc/NEWS.d/next/Library/2024-12-17-12-41-07.gh-issue-126742.l07qvT.rst b/Misc/NEWS.d/next/Library/2024-12-17-12-41-07.gh-issue-126742.l07qvT.rst deleted file mode 100644 index 70f7cc129f66e3..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-12-17-12-41-07.gh-issue-126742.l07qvT.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fix support of localized error messages reported by :manpage:`dlerror(3)` and -:manpage:`gdbm_strerror ` in :mod:`ctypes` and :mod:`dbm.gnu` -functions respectively. Patch by Bénédikt Tran. 
diff --git a/Misc/NEWS.d/next/Library/2024-12-17-13-21-52.gh-issue-127060.mv2bX6.rst b/Misc/NEWS.d/next/Library/2024-12-17-13-21-52.gh-issue-127060.mv2bX6.rst deleted file mode 100644 index 1da89e7a282147..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-12-17-13-21-52.gh-issue-127060.mv2bX6.rst +++ /dev/null @@ -1,2 +0,0 @@ -Set TERM environment variable to "dumb" to disable traceback colors in IDLE, -since IDLE doesn't understand ANSI escape sequences. Patch by Victor Stinner. diff --git a/Misc/NEWS.d/next/Library/2024-12-17-16-48-02.gh-issue-115514.1yOJ7T.rst b/Misc/NEWS.d/next/Library/2024-12-17-16-48-02.gh-issue-115514.1yOJ7T.rst new file mode 100644 index 00000000000000..24e836a0b0b7f9 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-12-17-16-48-02.gh-issue-115514.1yOJ7T.rst @@ -0,0 +1,2 @@ +Fix exceptions and incomplete writes after :class:`!asyncio._SelectorTransport` +is closed before writes are completed. diff --git a/Misc/NEWS.d/next/Library/2024-12-18-00-07-50.gh-issue-128014.F3aUbz.rst b/Misc/NEWS.d/next/Library/2024-12-18-00-07-50.gh-issue-128014.F3aUbz.rst deleted file mode 100644 index ef339a291f0ddd..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-12-18-00-07-50.gh-issue-128014.F3aUbz.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix resetting the default window icon by passing ``default=''`` to the -:mod:`tkinter` method :meth:`!wm_iconbitmap`. diff --git a/Misc/NEWS.d/next/Library/2024-12-18-10-18-55.gh-issue-128062.E9oU7-.rst b/Misc/NEWS.d/next/Library/2024-12-18-10-18-55.gh-issue-128062.E9oU7-.rst deleted file mode 100644 index d8e262e0848077..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-12-18-10-18-55.gh-issue-128062.E9oU7-.rst +++ /dev/null @@ -1,2 +0,0 @@ -Revert the font of :mod:`turtledemo`'s menu bar to its default value and -display the shortcut keys in the correct position. diff --git a/Misc/NEWS.d/next/Library/2024-12-19-20-46-01.gh-issue-127946.4lM3Op.rst b/Misc/NEWS.d/next/Library/2024-12-19-20-46-01.gh-issue-127946.4lM3Op.rst deleted file mode 100644 index faf1ec042bc2b9..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-12-19-20-46-01.gh-issue-127946.4lM3Op.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix crash when modifying :class:`ctypes._CFuncPtr` objects concurrently on -the :term:`free threaded ` build. diff --git a/Misc/NEWS.d/next/Library/2024-12-20-08-44-12.gh-issue-127975.8HJwu9.rst b/Misc/NEWS.d/next/Library/2024-12-20-08-44-12.gh-issue-127975.8HJwu9.rst new file mode 100644 index 00000000000000..597fa41deb811c --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-12-20-08-44-12.gh-issue-127975.8HJwu9.rst @@ -0,0 +1 @@ +Avoid reusing quote types in :func:`ast.unparse` if not needed. diff --git a/Misc/NEWS.d/next/Library/2024-12-20-10-57-10.gh-issue-128118.mYak8i.rst b/Misc/NEWS.d/next/Library/2024-12-20-10-57-10.gh-issue-128118.mYak8i.rst deleted file mode 100644 index bc2898edfda721..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-12-20-10-57-10.gh-issue-128118.mYak8i.rst +++ /dev/null @@ -1,2 +0,0 @@ -Improve performance of :func:`copy.copy` by 30% via -a fast path for atomic types and container types. diff --git a/Misc/NEWS.d/next/Library/2024-12-21-03-20-12.gh-issue-128131.QpPmNt.rst b/Misc/NEWS.d/next/Library/2024-12-21-03-20-12.gh-issue-128131.QpPmNt.rst new file mode 100644 index 00000000000000..f4c4ebce10729c --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-12-21-03-20-12.gh-issue-128131.QpPmNt.rst @@ -0,0 +1,2 @@ +Completely support random access of uncompressed unencrypted read-only +zip files obtained by :meth:`ZipFile.open `. 
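The zip random-access entry above means seeking inside a member opened with :meth:`ZipFile.open` is fully supported for stored (uncompressed), unencrypted archives. A sketch, where ``archive.zip`` and ``data.bin`` are hypothetical names::

   import zipfile

   with zipfile.ZipFile("archive.zip") as zf:
       with zf.open("data.bin") as member:
           member.seek(1024)       # jump straight to offset 1024
           chunk = member.read(16)
           member.seek(0)          # and back again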
diff --git a/Misc/NEWS.d/next/Library/2024-12-23-02-09-44.gh-issue-58956.4OdMdT.rst b/Misc/NEWS.d/next/Library/2024-12-23-02-09-44.gh-issue-58956.4OdMdT.rst new file mode 100644 index 00000000000000..b78bc5aaf44217 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-12-23-02-09-44.gh-issue-58956.4OdMdT.rst @@ -0,0 +1 @@ +Fixed a frame reference leak in :mod:`bdb`. diff --git a/Misc/NEWS.d/next/Library/2024-12-26-11-00-03.gh-issue-112064.mCcw3B.rst b/Misc/NEWS.d/next/Library/2024-12-26-11-00-03.gh-issue-112064.mCcw3B.rst new file mode 100644 index 00000000000000..e885add7b68c0f --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-12-26-11-00-03.gh-issue-112064.mCcw3B.rst @@ -0,0 +1,2 @@ +Fix incorrect handling of negative read sizes in :meth:`HTTPResponse.read +`. Patch by Yury Manushkin. diff --git a/Misc/NEWS.d/next/Library/2024-12-29-00-33-34.gh-issue-128317.WgFina.rst b/Misc/NEWS.d/next/Library/2024-12-29-00-33-34.gh-issue-128317.WgFina.rst deleted file mode 100644 index 4441108014569e..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-12-29-00-33-34.gh-issue-128317.WgFina.rst +++ /dev/null @@ -1,2 +0,0 @@ -Highlight today in colour in :mod:`calendar`'s CLI output. Patch by Hugo van -Kemenade. diff --git a/Misc/NEWS.d/next/Library/2024-12-30-19-53-14.gh-issue-91279.EeOJk1.rst b/Misc/NEWS.d/next/Library/2024-12-30-19-53-14.gh-issue-91279.EeOJk1.rst new file mode 100644 index 00000000000000..30ee2ea5efd069 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-12-30-19-53-14.gh-issue-91279.EeOJk1.rst @@ -0,0 +1,3 @@ +:meth:`zipfile.ZipFile.writestr` now respect ``SOURCE_DATE_EPOCH`` that +distributions can set centrally and have build tools consume this in order +to produce reproducible output. diff --git a/Misc/NEWS.d/next/Library/2024-12-30-20-48-28.gh-issue-88834.RIvgwc.rst b/Misc/NEWS.d/next/Library/2024-12-30-20-48-28.gh-issue-88834.RIvgwc.rst deleted file mode 100644 index ca43f914880ba3..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-12-30-20-48-28.gh-issue-88834.RIvgwc.rst +++ /dev/null @@ -1,3 +0,0 @@ -Unify the instance check for :class:`typing.Union` and -:class:`types.UnionType`: :class:`!Union` now uses the instance checks -against its parameters instead of the subclass checks. diff --git a/Misc/NEWS.d/next/Library/2025-01-02-13-05-16.gh-issue-128400.5N43fF.rst b/Misc/NEWS.d/next/Library/2025-01-02-13-05-16.gh-issue-128400.5N43fF.rst deleted file mode 100644 index 4033dea4eaf7bf..00000000000000 --- a/Misc/NEWS.d/next/Library/2025-01-02-13-05-16.gh-issue-128400.5N43fF.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix crash when using :func:`faulthandler.dump_traceback` while other threads -are active on the :term:`free threaded ` build. diff --git a/Misc/NEWS.d/next/Library/2025-01-02-15-20-17.gh-issue-128400.UMiG4f.rst b/Misc/NEWS.d/next/Library/2025-01-02-15-20-17.gh-issue-128400.UMiG4f.rst deleted file mode 100644 index f9d5f84224c8dc..00000000000000 --- a/Misc/NEWS.d/next/Library/2025-01-02-15-20-17.gh-issue-128400.UMiG4f.rst +++ /dev/null @@ -1,2 +0,0 @@ -Only show the current thread in :mod:`faulthandler` on the :term:`free -threaded ` build to prevent races. 
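A sketch of the ``SOURCE_DATE_EPOCH`` entry above: with the variable set, entries written by :meth:`~zipfile.ZipFile.writestr` get a fixed timestamp, which keeps archives reproducible (the variable is normally set by the build environment; it is set programmatically here only for illustration)::

   import os
   import zipfile

   os.environ["SOURCE_DATE_EPOCH"] = "1735689600"   # 2025-01-01 00:00:00 UTC
   with zipfile.ZipFile("dist.zip", "w") as zf:     # hypothetical output archive
       zf.writestr("hello.txt", "hello")            # timestamp taken from SOURCE_DATE_EPOCH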
diff --git a/Misc/NEWS.d/next/Library/2025-01-02-20-34-04.gh-issue-128427.onPoQZ.rst b/Misc/NEWS.d/next/Library/2025-01-02-20-34-04.gh-issue-128427.onPoQZ.rst new file mode 100644 index 00000000000000..54cae43702ded7 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-02-20-34-04.gh-issue-128427.onPoQZ.rst @@ -0,0 +1,2 @@ +:const:`uuid.NIL` and :const:`uuid.MAX` are now available to represent the Nil +and Max UUID formats as defined by :rfc:`9562`. diff --git a/Misc/NEWS.d/next/Library/2025-01-04-11-10-04.gh-issue-128479.jvOrF-.rst b/Misc/NEWS.d/next/Library/2025-01-04-11-10-04.gh-issue-128479.jvOrF-.rst new file mode 100644 index 00000000000000..fc3b4d5a5273a6 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-04-11-10-04.gh-issue-128479.jvOrF-.rst @@ -0,0 +1 @@ +Fix :func:`!asyncio.staggered.staggered_race` leaking tasks and issuing an unhandled exception. diff --git a/Misc/NEWS.d/next/Library/2025-01-04-20-51-48.gh-issue-128509.3gr_-O.rst b/Misc/NEWS.d/next/Library/2025-01-04-20-51-48.gh-issue-128509.3gr_-O.rst new file mode 100644 index 00000000000000..ba45884304f662 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-04-20-51-48.gh-issue-128509.3gr_-O.rst @@ -0,0 +1,2 @@ +Add :func:`sys._is_immortal` for identifying :term:`immortal` objects at +runtime. diff --git a/Misc/NEWS.d/next/Library/2025-01-06-10-37-27.gh-issue-128384.V0xzwH.rst b/Misc/NEWS.d/next/Library/2025-01-06-10-37-27.gh-issue-128384.V0xzwH.rst new file mode 100644 index 00000000000000..2ca592be20b681 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-06-10-37-27.gh-issue-128384.V0xzwH.rst @@ -0,0 +1,5 @@ +Add locking to :mod:`warnings` to avoid some data races when free-threading +is used. Change ``_warnings_runtime_state.mutex`` to be a recursive mutex +and expose it to :mod:`warnings`, via the :func:`!_acquire_lock` and +:func:`!_release_lock` functions. The lock is held when ``filters`` and +``_filters_version`` are updated. diff --git a/Misc/NEWS.d/next/Library/2025-01-07-21-48-32.gh-issue-128498.n6jtlW.rst b/Misc/NEWS.d/next/Library/2025-01-07-21-48-32.gh-issue-128498.n6jtlW.rst new file mode 100644 index 00000000000000..9a241e37c20a44 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-07-21-48-32.gh-issue-128498.n6jtlW.rst @@ -0,0 +1,2 @@ +Default to stdout isatty for color detection instead of stderr. Patch by +Hugo van Kemenade. diff --git a/Misc/NEWS.d/next/Library/2025-01-08-22-30-38.gh-issue-128636.jQfWXj.rst b/Misc/NEWS.d/next/Library/2025-01-08-22-30-38.gh-issue-128636.jQfWXj.rst new file mode 100644 index 00000000000000..80c9840b585530 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-08-22-30-38.gh-issue-128636.jQfWXj.rst @@ -0,0 +1,2 @@ +Fix PyREPL failure when :data:`os.environ` is overwritten with an invalid +value. diff --git a/Misc/NEWS.d/next/Library/2025-01-09-16-20-34.gh-issue-128156.GfObBq.rst b/Misc/NEWS.d/next/Library/2025-01-09-16-20-34.gh-issue-128156.GfObBq.rst new file mode 100644 index 00000000000000..ec6a55040ae6cb --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-09-16-20-34.gh-issue-128156.GfObBq.rst @@ -0,0 +1,3 @@ +When using macOS system ``libffi``, support for complex types in +:mod:`ctypes` is now checked at runtime (macOS 10.15 or newer). The types +must also be available at build time. 
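Editor's note: a minimal check of the new :const:`uuid.NIL` and :const:`uuid.MAX` constants mentioned above; the only assumption is that the values follow RFC 9562 (the all-zero and all-one UUIDs).

    import uuid

    assert uuid.NIL == uuid.UUID("00000000-0000-0000-0000-000000000000")   # Nil UUID
    assert uuid.MAX == uuid.UUID("ffffffff-ffff-ffff-ffff-ffffffffffff")   # Max UUID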
diff --git a/Misc/NEWS.d/next/Library/2025-01-10-13-06-54.gh-issue-118761.f8oADD.rst b/Misc/NEWS.d/next/Library/2025-01-10-13-06-54.gh-issue-118761.f8oADD.rst new file mode 100644 index 00000000000000..37c25cb2efd034 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-10-13-06-54.gh-issue-118761.f8oADD.rst @@ -0,0 +1,5 @@ +Improve the performance of :func:`base64.b16decode` by up to ten times +by more efficiently checking the byte-string for hexadecimal digits. +Reduce the import time of :mod:`base64` by up to six times, +by no longer importing :mod:`re`. +Patch by Bénédikt Tran, Chris Markiewicz, and Adam Turner. diff --git a/Misc/NEWS.d/next/Library/2025-01-13-07-54-32.gh-issue-128308.kYSDRF.rst b/Misc/NEWS.d/next/Library/2025-01-13-07-54-32.gh-issue-128308.kYSDRF.rst new file mode 100644 index 00000000000000..efa613876a35fd --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-13-07-54-32.gh-issue-128308.kYSDRF.rst @@ -0,0 +1 @@ +Support the *name* keyword argument for eager tasks in :func:`asyncio.loop.create_task`, :func:`asyncio.create_task` and :func:`asyncio.TaskGroup.create_task`, by passing on all *kwargs* to the task factory set by :func:`asyncio.loop.set_task_factory`. diff --git a/Misc/NEWS.d/next/Library/2025-01-15-09-45-43.gh-issue-118761.TvAC8E.rst b/Misc/NEWS.d/next/Library/2025-01-15-09-45-43.gh-issue-118761.TvAC8E.rst new file mode 100644 index 00000000000000..38d18b7f4ca05e --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-15-09-45-43.gh-issue-118761.TvAC8E.rst @@ -0,0 +1,3 @@ +Reduce the import time of :mod:`csv` by up to five times, by importing +:mod:`re` on demand. In particular, ``re`` is no longer implicitly exposed +as ``csv.re``. Patch by Bénédikt Tran. diff --git a/Misc/NEWS.d/next/Library/2025-01-15-18-54-48.gh-issue-118761.G1dv6E.rst b/Misc/NEWS.d/next/Library/2025-01-15-18-54-48.gh-issue-118761.G1dv6E.rst new file mode 100644 index 00000000000000..4144ef8f40e6dd --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-15-18-54-48.gh-issue-118761.G1dv6E.rst @@ -0,0 +1,2 @@ +Reduce the import time of :mod:`optparse` when no help text is printed. +Patch by Eli Schwartz. diff --git a/Misc/NEWS.d/next/Library/2025-01-15-19-16-50.gh-issue-118761.cbW2ZL.rst b/Misc/NEWS.d/next/Library/2025-01-15-19-16-50.gh-issue-118761.cbW2ZL.rst new file mode 100644 index 00000000000000..0eef8777512dd8 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-15-19-16-50.gh-issue-118761.cbW2ZL.rst @@ -0,0 +1,3 @@ +Reduce import time of :mod:`gettext` by up to ten times, by importing +:mod:`re` on demand. In particular, ``re`` is no longer implicitly +exposed as ``gettext.re``. Patch by Eli Schwartz. diff --git a/Misc/NEWS.d/next/Library/2025-01-15-19-32-23.gh-issue-128891.ojUxKo.rst b/Misc/NEWS.d/next/Library/2025-01-15-19-32-23.gh-issue-128891.ojUxKo.rst new file mode 100644 index 00000000000000..79d845bbab7cfc --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-15-19-32-23.gh-issue-128891.ojUxKo.rst @@ -0,0 +1 @@ +Add specialized opcodes to ``opcode.opname``. diff --git a/Misc/NEWS.d/next/Library/2025-01-15-21-41-51.gh-issue-128679.tq10F2.rst b/Misc/NEWS.d/next/Library/2025-01-15-21-41-51.gh-issue-128679.tq10F2.rst new file mode 100644 index 00000000000000..5c108da5703c00 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-15-21-41-51.gh-issue-128679.tq10F2.rst @@ -0,0 +1,3 @@ +:mod:`tracemalloc`: Fix race conditions when :func:`tracemalloc.stop` is +called by a thread, while other threads are tracing memory allocations. +Patch by Victor Stinner.
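Editor's note: several of the gh-issue-118761 entries above rely on the same deferral idiom; this is a generic sketch of it (not the actual stdlib code). Importing ``re`` inside the function that needs it means importing the module no longer pays for ``re``, and no ``csv.re``/``gettext.re`` attribute is exposed.

    def _compile_pattern(expr):
        import re                  # deferred until the regex feature is actually used
        return re.compile(expr)    # 're' stays a local name, not a module attribute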
diff --git a/Misc/NEWS.d/next/Library/2025-01-16-10-06-40.gh-issue-118761.z100LC.rst b/Misc/NEWS.d/next/Library/2025-01-16-10-06-40.gh-issue-118761.z100LC.rst new file mode 100644 index 00000000000000..ea71ecaaeb2936 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-16-10-06-40.gh-issue-118761.z100LC.rst @@ -0,0 +1,2 @@ +Improve import time of :mod:`tomllib` by removing ``typing``, ``string``, +and ``tomllib._types`` imports. Patch by Taneli Hukkinen. diff --git a/Misc/NEWS.d/next/Library/2025-01-17-11-46-16.gh-issue-128916.GEePbO.rst b/Misc/NEWS.d/next/Library/2025-01-17-11-46-16.gh-issue-128916.GEePbO.rst new file mode 100644 index 00000000000000..f2db341ef81621 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-17-11-46-16.gh-issue-128916.GEePbO.rst @@ -0,0 +1,3 @@ +Do not attempt to set ``SO_REUSEPORT`` on sockets of address families +other than ``AF_INET`` and ``AF_INET6``, as it is meaningless for those +address families, and the call will fail with Linux kernel 6.12.9 and newer. diff --git a/Misc/NEWS.d/next/Library/2025-01-17-17-20-51.gh-issue-128894.gX1-8J.rst b/Misc/NEWS.d/next/Library/2025-01-17-17-20-51.gh-issue-128894.gX1-8J.rst new file mode 100644 index 00000000000000..7e015103a95713 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-17-17-20-51.gh-issue-128894.gX1-8J.rst @@ -0,0 +1,2 @@ +Fix ``traceback.TracebackException._format_syntax_error`` not to fail on +exceptions with custom metadata. diff --git a/Misc/NEWS.d/next/Library/2025-01-17-21-33-11.gh-issue-128961.XwvyIZ.rst b/Misc/NEWS.d/next/Library/2025-01-17-21-33-11.gh-issue-128961.XwvyIZ.rst new file mode 100644 index 00000000000000..9c985df77743da --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-17-21-33-11.gh-issue-128961.XwvyIZ.rst @@ -0,0 +1 @@ +Fix a crash when setting state on an exhausted :class:`array.array` iterator. diff --git a/Misc/NEWS.d/next/Library/2025-01-18-11-04-44.gh-issue-128978.hwg7-w.rst b/Misc/NEWS.d/next/Library/2025-01-18-11-04-44.gh-issue-128978.hwg7-w.rst new file mode 100644 index 00000000000000..521496d6a2f8c2 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-18-11-04-44.gh-issue-128978.hwg7-w.rst @@ -0,0 +1,2 @@ +Fix a :exc:`NameError` in :func:`!sysconfig.expand_makefile_vars`. Patch by +Bénédikt Tran. diff --git a/Misc/NEWS.d/next/Library/2025-01-18-11-24-02.gh-issue-118761.G8MmxY.rst b/Misc/NEWS.d/next/Library/2025-01-18-11-24-02.gh-issue-118761.G8MmxY.rst new file mode 100644 index 00000000000000..3b3f3f7d98c5d6 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-18-11-24-02.gh-issue-118761.G8MmxY.rst @@ -0,0 +1,2 @@ +Reduce import time of :mod:`pstats` and :mod:`zipfile` by up to 20%, by +removing unnecessary imports of :mod:`typing`. Patch by Bénédikt Tran.
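Editor's note: a hedged sketch of the guard described in the ``SO_REUSEPORT`` entry above (gh-issue-128916); the helper name is invented, but it shows the address-family check being applied before touching the option.

    import socket

    def _maybe_enable_reuseport(sock):
        # Only attempt SO_REUSEPORT for IPv4/IPv6 sockets; for other families it is
        # meaningless and newer Linux kernels reject the call.
        if hasattr(socket, "SO_REUSEPORT") and sock.family in (socket.AF_INET, socket.AF_INET6):
            sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)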
diff --git a/Misc/NEWS.d/next/Library/2025-01-18-16-58-10.gh-issue-128991.EzJit9.rst b/Misc/NEWS.d/next/Library/2025-01-18-16-58-10.gh-issue-128991.EzJit9.rst new file mode 100644 index 00000000000000..64fa04fb53e89c --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-18-16-58-10.gh-issue-128991.EzJit9.rst @@ -0,0 +1 @@ +Release the reference to the entered frame within the :mod:`bdb` callback. diff --git a/Misc/NEWS.d/next/Library/2025-01-20-13-12-39.gh-issue-128550.AJ5TOL.rst b/Misc/NEWS.d/next/Library/2025-01-20-13-12-39.gh-issue-128550.AJ5TOL.rst new file mode 100644 index 00000000000000..f59feac795ea18 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-20-13-12-39.gh-issue-128550.AJ5TOL.rst @@ -0,0 +1 @@ +Removed an incorrect optimization relating to eager tasks in :class:`asyncio.TaskGroup` that resulted in cancellations being missed. diff --git a/Misc/NEWS.d/next/Library/2025-01-20-16-02-38.gh-issue-129064.JXasgJ.rst b/Misc/NEWS.d/next/Library/2025-01-20-16-02-38.gh-issue-129064.JXasgJ.rst new file mode 100644 index 00000000000000..93a1eda350d1fc --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-20-16-02-38.gh-issue-129064.JXasgJ.rst @@ -0,0 +1,2 @@ +Deprecate :func:`!sysconfig.expand_makefile_vars`, in favor of using +:func:`sysconfig.get_paths` with the ``vars`` argument. diff --git a/Misc/NEWS.d/next/Library/2025-01-20-20-59-26.gh-issue-92897.G0xH8o.rst b/Misc/NEWS.d/next/Library/2025-01-20-20-59-26.gh-issue-92897.G0xH8o.rst new file mode 100644 index 00000000000000..632ca03bbf8dd2 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-20-20-59-26.gh-issue-92897.G0xH8o.rst @@ -0,0 +1,2 @@ +Scheduled the deprecation of the ``check_home`` argument of +:func:`sysconfig.is_python_build` to Python 3.15. diff --git a/Misc/NEWS.d/next/Library/2025-01-21-18-52-32.gh-issue-129061.4idD_B.rst b/Misc/NEWS.d/next/Library/2025-01-21-18-52-32.gh-issue-129061.4idD_B.rst new file mode 100644 index 00000000000000..5c5c05e1161e86 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-21-18-52-32.gh-issue-129061.4idD_B.rst @@ -0,0 +1 @@ +Fix handling of FORCE_COLOR and NO_COLOR when they are set to empty strings. Patch by Hugo van Kemenade. diff --git a/Misc/NEWS.d/next/Library/2025-01-22-16-54-25.gh-issue-129205.FMqrUt.rst b/Misc/NEWS.d/next/Library/2025-01-22-16-54-25.gh-issue-129205.FMqrUt.rst new file mode 100644 index 00000000000000..c4ed76408f32f6 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-22-16-54-25.gh-issue-129205.FMqrUt.rst @@ -0,0 +1 @@ +Add :func:`os.readinto` to read into a :ref:`buffer object ` from a file descriptor. diff --git a/Misc/NEWS.d/next/Library/2025-01-24-10-48-32.gh-issue-129195.89d5NU.rst b/Misc/NEWS.d/next/Library/2025-01-24-10-48-32.gh-issue-129195.89d5NU.rst new file mode 100644 index 00000000000000..daf7297387dabd --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-24-10-48-32.gh-issue-129195.89d5NU.rst @@ -0,0 +1 @@ +Support reporting call graph information from :func:`!asyncio.staggered.staggered_race`. diff --git a/Misc/NEWS.d/next/Library/2025-01-26-10-01-21.gh-issue-129005.ncpLvw.rst b/Misc/NEWS.d/next/Library/2025-01-26-10-01-21.gh-issue-129005.ncpLvw.rst new file mode 100644 index 00000000000000..a825e9d244d525 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-26-10-01-21.gh-issue-129005.ncpLvw.rst @@ -0,0 +1 @@ +Optimize ``_pyio.FileIO.readinto`` by avoiding unnecessary objects and copies using :func:`os.readinto`.
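Editor's note: both readinto entries above reference the new :func:`os.readinto`; this short sketch follows the call shape described in the gh-issue-129205 entry (file descriptor plus writable buffer, returning the number of bytes read). The file name is hypothetical.

    import os

    buf = bytearray(4096)                        # any writable buffer object
    fd = os.open("payload.bin", os.O_RDONLY)     # hypothetical input file
    try:
        n = os.readinto(fd, buf)                 # fills buf in place, returns bytes read
    finally:
        os.close(fd)
    data = bytes(buf[:n])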
diff --git a/Misc/NEWS.d/next/Library/2025-01-27-14-05-19.gh-issue-129346.gZRd3g.rst b/Misc/NEWS.d/next/Library/2025-01-27-14-05-19.gh-issue-129346.gZRd3g.rst new file mode 100644 index 00000000000000..b5377277f6c51c --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-27-14-05-19.gh-issue-129346.gZRd3g.rst @@ -0,0 +1,2 @@ +In :mod:`sqlite3`, handle out-of-memory when creating user-defined SQL +functions. diff --git a/Misc/NEWS.d/next/Library/2025-01-29-10-53-32.gh-issue-118761.i8wjpV.rst b/Misc/NEWS.d/next/Library/2025-01-29-10-53-32.gh-issue-118761.i8wjpV.rst new file mode 100644 index 00000000000000..0762cbe5d63949 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-29-10-53-32.gh-issue-118761.i8wjpV.rst @@ -0,0 +1,2 @@ +Improve import time of :mod:`subprocess` by lazy importing ``locale`` and +``signal``. Patch by Taneli Hukkinen. diff --git a/Misc/NEWS.d/next/Library/2025-01-29-11-14-20.gh-issue-118761.gMZwE1.rst b/Misc/NEWS.d/next/Library/2025-01-29-11-14-20.gh-issue-118761.gMZwE1.rst new file mode 100644 index 00000000000000..c2474795d8233e --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-29-11-14-20.gh-issue-118761.gMZwE1.rst @@ -0,0 +1,2 @@ +Always lazy import ``warnings`` in :mod:`threading`. Patch by Taneli +Hukkinen. diff --git a/Misc/NEWS.d/next/Library/2025-01-29-13-37-18.gh-issue-126400.DaBaR3.rst b/Misc/NEWS.d/next/Library/2025-01-29-13-37-18.gh-issue-126400.DaBaR3.rst new file mode 100644 index 00000000000000..1532faf4b7d6f5 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-29-13-37-18.gh-issue-126400.DaBaR3.rst @@ -0,0 +1,2 @@ +Add a socket *timeout* keyword argument to +:class:`logging.handlers.SysLogHandler`. diff --git a/Misc/NEWS.d/next/Library/2025-01-29-14-30-54.gh-issue-129409.JZbOE6.rst b/Misc/NEWS.d/next/Library/2025-01-29-14-30-54.gh-issue-129409.JZbOE6.rst new file mode 100644 index 00000000000000..7e00b44c0ef471 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-29-14-30-54.gh-issue-129409.JZbOE6.rst @@ -0,0 +1,2 @@ +Fix an integer overflow in the :mod:`csv` module when writing a data field +larger than 2GB. diff --git a/Misc/NEWS.d/next/Library/2025-01-29-17-10-00.gh-issue-129403.314159.rst b/Misc/NEWS.d/next/Library/2025-01-29-17-10-00.gh-issue-129403.314159.rst new file mode 100644 index 00000000000000..0c2bdd3136e3a3 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-29-17-10-00.gh-issue-129403.314159.rst @@ -0,0 +1 @@ +Corrected :exc:`ValueError` message for :class:`asyncio.Barrier` and :class:`threading.Barrier`. diff --git a/Misc/NEWS.d/next/Library/2025-01-31-11-14-05.gh-issue-129502.j_ArNo.rst b/Misc/NEWS.d/next/Library/2025-01-31-11-14-05.gh-issue-129502.j_ArNo.rst new file mode 100644 index 00000000000000..e9e9d12c11d0ac --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-31-11-14-05.gh-issue-129502.j_ArNo.rst @@ -0,0 +1,5 @@ +Unlikely errors in preparing arguments for a :mod:`ctypes` callback are now +handled in the same way as errors raised in the callback or in converting +the result of the callback -- using :func:`sys.unraisablehook` instead of +:func:`sys.excepthook` and not setting :data:`sys.last_exc` and other +variables. diff --git a/Misc/NEWS.d/next/Security/2024-05-24-21-00-52.gh-issue-119511.jKrXQ8.rst b/Misc/NEWS.d/next/Security/2024-05-24-21-00-52.gh-issue-119511.jKrXQ8.rst new file mode 100644 index 00000000000000..f7b4031120e643 --- /dev/null +++ b/Misc/NEWS.d/next/Security/2024-05-24-21-00-52.gh-issue-119511.jKrXQ8.rst @@ -0,0 +1,7 @@ +Fix a potential denial of service in the :mod:`imaplib` module.
When connecting +to a malicious server, it could cause an arbitrary amount of memory to be +allocated. On many systems this is harmless as unused virtual memory is only a +mapping, but if this hit a virtual address size limit it could lead to a +:exc:`MemoryError` or other process crash. On unusual systems or builds where +all allocated memory is touched and backed by actual ram or storage it could've +consumed resources doing so until similarly crashing. diff --git a/Misc/NEWS.d/next/Security/2024-08-06-11-43-08.gh-issue-80222.wfR4BU.rst b/Misc/NEWS.d/next/Security/2024-08-06-11-43-08.gh-issue-80222.wfR4BU.rst new file mode 100644 index 00000000000000..0f0661d0b1cf4a --- /dev/null +++ b/Misc/NEWS.d/next/Security/2024-08-06-11-43-08.gh-issue-80222.wfR4BU.rst @@ -0,0 +1,6 @@ +Fix bug in the folding of quoted strings when flattening an email message using +a modern email policy. Previously when a quoted string was folded so that +it spanned more than one line, the surrounding quotes and internal escapes +would be omitted. This could theoretically be used to spoof header lines +using a carefully constructed quoted string if the resulting rendered email +was transmitted or re-parsed. diff --git a/Misc/NEWS.d/next/Security/2024-10-29-09-15-10.gh-issue-126108.eTIjHY.rst b/Misc/NEWS.d/next/Security/2024-10-29-09-15-10.gh-issue-126108.eTIjHY.rst new file mode 100644 index 00000000000000..9f2c7e84d4dff0 --- /dev/null +++ b/Misc/NEWS.d/next/Security/2024-10-29-09-15-10.gh-issue-126108.eTIjHY.rst @@ -0,0 +1 @@ +Fix a possible ``NULL`` pointer dereference in :c:func:`!PySys_AddWarnOptionUnicode`. diff --git a/Misc/NEWS.d/next/Security/2025-01-28-14-08-03.gh-issue-105704.EnhHxu.rst b/Misc/NEWS.d/next/Security/2025-01-28-14-08-03.gh-issue-105704.EnhHxu.rst new file mode 100644 index 00000000000000..bff1bc6b0d609c --- /dev/null +++ b/Misc/NEWS.d/next/Security/2025-01-28-14-08-03.gh-issue-105704.EnhHxu.rst @@ -0,0 +1,4 @@ +When using :func:`urllib.parse.urlsplit` and :func:`urllib.parse.urlparse` host +parsing would not reject domain names containing square brackets (``[`` and +``]``). Square brackets are only valid for IPv6 and IPvFuture hosts according to +`RFC 3986 Section 3.2.2 `__. diff --git a/Misc/NEWS.d/next/Tests/2025-01-04-02-41-41.gh-issue-128474.0b-tl4.rst b/Misc/NEWS.d/next/Tests/2025-01-04-02-41-41.gh-issue-128474.0b-tl4.rst new file mode 100644 index 00000000000000..7dc807757c5ec0 --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2025-01-04-02-41-41.gh-issue-128474.0b-tl4.rst @@ -0,0 +1,2 @@ +Disable ``test_embed`` test cases that segfault on BOLT instrument binaries. +The tests are only disabled when BOLT is enabled. diff --git a/Misc/NEWS.d/next/Tests/2025-01-30-13-09-27.gh-issue-129386.iNtbEi.rst b/Misc/NEWS.d/next/Tests/2025-01-30-13-09-27.gh-issue-129386.iNtbEi.rst new file mode 100644 index 00000000000000..a03f596bc46c30 --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2025-01-30-13-09-27.gh-issue-129386.iNtbEi.rst @@ -0,0 +1,2 @@ +Add ``test.support.reset_code``, which can be used to reset various +bytecode-level optimizations and local instrumentation for a function. 
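Editor's note: an illustrative check of the :func:`urllib.parse.urlsplit` hardening described in the gh-issue-105704 entry above; the exact exception message is not asserted here, only that bracketed non-IPv6/IPvFuture hosts are rejected while IPv6 literals keep working.

    from urllib.parse import urlsplit

    urlsplit("http://[::1]:8080/")          # square brackets remain valid for IPv6 hosts
    try:
        urlsplit("http://exam[ple].org/")   # brackets elsewhere in the host are now rejected
    except ValueError:
        print("rejected as expected")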
diff --git a/Misc/NEWS.d/next/Tools-Demos/2025-01-03-23-51-07.gh-issue-128152.IhzElS.rst b/Misc/NEWS.d/next/Tools-Demos/2025-01-03-23-51-07.gh-issue-128152.IhzElS.rst deleted file mode 100644 index 9657e138e9911b..00000000000000 --- a/Misc/NEWS.d/next/Tools-Demos/2025-01-03-23-51-07.gh-issue-128152.IhzElS.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix a bug where Argument Clinic's C pre-processor parser tried to parse -pre-processor directives inside C comments. Patch by Erlend Aasland. diff --git a/Misc/NEWS.d/next/Tools-Demos/2025-01-24-14-49-40.gh-issue-129248.JAapG2.rst b/Misc/NEWS.d/next/Tools-Demos/2025-01-24-14-49-40.gh-issue-129248.JAapG2.rst new file mode 100644 index 00000000000000..e3c781cf10cf50 --- /dev/null +++ b/Misc/NEWS.d/next/Tools-Demos/2025-01-24-14-49-40.gh-issue-129248.JAapG2.rst @@ -0,0 +1,2 @@ +The iOS test runner now strips the log prefix from each line output by the +test suite. diff --git a/Misc/sbom.spdx.json b/Misc/sbom.spdx.json index b4d785f65639a5..316c266b7e4fd6 100644 --- a/Misc/sbom.spdx.json +++ b/Misc/sbom.spdx.json @@ -1280,11 +1280,11 @@ "checksums": [ { "algorithm": "SHA1", - "checksumValue": "9dcb50e3f9c3245972731be5da0b28e7583198d9" + "checksumValue": "5d6fdd98730584f74f7b731da6e488fe234504b3" }, { "algorithm": "SHA256", - "checksumValue": "7cac49fef5e9d952ec9390bf81c54d83f1b5da32fdf76091c2f0770ed943b7fe" + "checksumValue": "d74f365463166891f62e1326d22b2d39d865776b7ea5e0df2aea5eede4d85b0f" } ], "fileName": "Modules/_decimal/libmpdec/io.c" diff --git a/Misc/stable_abi.toml b/Misc/stable_abi.toml index f9e51f0683c965..9317be605f0065 100644 --- a/Misc/stable_abi.toml +++ b/Misc/stable_abi.toml @@ -1253,6 +1253,7 @@ added = '3.2' [function.PySequence_Fast] added = '3.2' + abi_only = true [function.PySequence_GetItem] added = '3.2' [function.PySequence_GetSlice] @@ -2540,3 +2541,7 @@ added = '3.14' [function.PyType_Freeze] added = '3.14' +[function.Py_PACK_FULL_VERSION] + added = '3.14' +[function.Py_PACK_VERSION] + added = '3.14' diff --git a/Modules/Setup.stdlib.in b/Modules/Setup.stdlib.in index 52c0f883d383db..6bb05a06a3465d 100644 --- a/Modules/Setup.stdlib.in +++ b/Modules/Setup.stdlib.in @@ -162,8 +162,8 @@ @MODULE__XXTESTFUZZ_TRUE@_xxtestfuzz _xxtestfuzz/_xxtestfuzz.c _xxtestfuzz/fuzzer.c @MODULE__TESTBUFFER_TRUE@_testbuffer _testbuffer.c @MODULE__TESTINTERNALCAPI_TRUE@_testinternalcapi _testinternalcapi.c _testinternalcapi/test_lock.c _testinternalcapi/pytime.c _testinternalcapi/set.c _testinternalcapi/test_critical_sections.c -@MODULE__TESTCAPI_TRUE@_testcapi _testcapimodule.c _testcapi/vectorcall.c _testcapi/heaptype.c _testcapi/abstract.c _testcapi/unicode.c _testcapi/dict.c _testcapi/set.c _testcapi/list.c _testcapi/tuple.c _testcapi/getargs.c _testcapi/datetime.c _testcapi/docstring.c _testcapi/mem.c _testcapi/watchers.c _testcapi/long.c _testcapi/float.c _testcapi/complex.c _testcapi/numbers.c _testcapi/structmember.c _testcapi/exceptions.c _testcapi/code.c _testcapi/buffer.c _testcapi/pyatomic.c _testcapi/run.c _testcapi/file.c _testcapi/codec.c _testcapi/immortal.c _testcapi/gc.c _testcapi/hash.c _testcapi/time.c _testcapi/bytes.c _testcapi/object.c _testcapi/monitoring.c _testcapi/config.c -@MODULE__TESTLIMITEDCAPI_TRUE@_testlimitedcapi _testlimitedcapi.c _testlimitedcapi/abstract.c _testlimitedcapi/bytearray.c _testlimitedcapi/bytes.c _testlimitedcapi/codec.c _testlimitedcapi/complex.c _testlimitedcapi/dict.c _testlimitedcapi/eval.c _testlimitedcapi/float.c _testlimitedcapi/heaptype_relative.c _testlimitedcapi/list.c _testlimitedcapi/long.c 
_testlimitedcapi/object.c _testlimitedcapi/pyos.c _testlimitedcapi/set.c _testlimitedcapi/sys.c _testlimitedcapi/tuple.c _testlimitedcapi/unicode.c _testlimitedcapi/vectorcall_limited.c +@MODULE__TESTCAPI_TRUE@_testcapi _testcapimodule.c _testcapi/vectorcall.c _testcapi/heaptype.c _testcapi/abstract.c _testcapi/unicode.c _testcapi/dict.c _testcapi/set.c _testcapi/list.c _testcapi/tuple.c _testcapi/getargs.c _testcapi/datetime.c _testcapi/docstring.c _testcapi/mem.c _testcapi/watchers.c _testcapi/long.c _testcapi/float.c _testcapi/complex.c _testcapi/numbers.c _testcapi/structmember.c _testcapi/exceptions.c _testcapi/code.c _testcapi/buffer.c _testcapi/pyatomic.c _testcapi/run.c _testcapi/file.c _testcapi/codec.c _testcapi/immortal.c _testcapi/gc.c _testcapi/hash.c _testcapi/time.c _testcapi/bytes.c _testcapi/object.c _testcapi/monitoring.c _testcapi/config.c _testcapi/import.c _testcapi/frame.c _testcapi/type.c _testcapi/function.c +@MODULE__TESTLIMITEDCAPI_TRUE@_testlimitedcapi _testlimitedcapi.c _testlimitedcapi/abstract.c _testlimitedcapi/bytearray.c _testlimitedcapi/bytes.c _testlimitedcapi/codec.c _testlimitedcapi/complex.c _testlimitedcapi/dict.c _testlimitedcapi/eval.c _testlimitedcapi/float.c _testlimitedcapi/heaptype_relative.c _testlimitedcapi/import.c _testlimitedcapi/list.c _testlimitedcapi/long.c _testlimitedcapi/object.c _testlimitedcapi/pyos.c _testlimitedcapi/set.c _testlimitedcapi/sys.c _testlimitedcapi/tuple.c _testlimitedcapi/unicode.c _testlimitedcapi/vectorcall_limited.c _testlimitedcapi/version.c _testlimitedcapi/file.c @MODULE__TESTCLINIC_TRUE@_testclinic _testclinic.c @MODULE__TESTCLINIC_LIMITED_TRUE@_testclinic_limited _testclinic_limited.c diff --git a/Modules/_abc.c b/Modules/_abc.c index 4f4b24b035db4a..d6a953b336025d 100644 --- a/Modules/_abc.c +++ b/Modules/_abc.c @@ -67,6 +67,8 @@ typedef struct { uint64_t _abc_negative_cache_version; } _abc_data; +#define _abc_data_CAST(op) ((_abc_data *)(op)) + static inline uint64_t get_cache_version(_abc_data *impl) { @@ -88,8 +90,9 @@ set_cache_version(_abc_data *impl, uint64_t version) } static int -abc_data_traverse(_abc_data *self, visitproc visit, void *arg) +abc_data_traverse(PyObject *op, visitproc visit, void *arg) { + _abc_data *self = _abc_data_CAST(op); Py_VISIT(Py_TYPE(self)); Py_VISIT(self->_abc_registry); Py_VISIT(self->_abc_cache); @@ -98,8 +101,9 @@ abc_data_traverse(_abc_data *self, visitproc visit, void *arg) } static int -abc_data_clear(_abc_data *self) +abc_data_clear(PyObject *op) { + _abc_data *self = _abc_data_CAST(op); Py_CLEAR(self->_abc_registry); Py_CLEAR(self->_abc_cache); Py_CLEAR(self->_abc_negative_cache); @@ -107,7 +111,7 @@ abc_data_clear(_abc_data *self) } static void -abc_data_dealloc(_abc_data *self) +abc_data_dealloc(PyObject *self) { PyObject_GC_UnTrack(self); PyTypeObject *tp = Py_TYPE(self); @@ -212,7 +216,7 @@ _destroy(PyObject *setweakref, PyObject *objweakref) } static PyMethodDef _destroy_def = { - "_destroy", (PyCFunction) _destroy, METH_O + "_destroy", _destroy, METH_O }; static int @@ -964,7 +968,7 @@ _abcmodule_clear(PyObject *module) static void _abcmodule_free(void *module) { - _abcmodule_clear((PyObject *)module); + (void)_abcmodule_clear((PyObject *)module); } static PyModuleDef_Slot _abcmodule_slots[] = { diff --git a/Modules/_asynciomodule.c b/Modules/_asynciomodule.c index b8b184af04a7cb..b488fd92aa6817 100644 --- a/Modules/_asynciomodule.c +++ b/Modules/_asynciomodule.c @@ -9,6 +9,7 @@ #include "pycore_llist.h" // struct llist_node #include "pycore_modsupport.h" // 
_PyArg_CheckPositional() #include "pycore_moduleobject.h" // _PyModule_GetState() +#include "pycore_object.h" // _PyObject_SetMaybeWeakref #include "pycore_pyerrors.h" // _PyErr_ClearExcState() #include "pycore_pylifecycle.h" // _Py_IsInterpreterFinalizing() #include "pycore_pystate.h" // _PyThreadState_GET() @@ -40,12 +41,17 @@ typedef enum { PyObject *prefix##_source_tb; \ PyObject *prefix##_cancel_msg; \ PyObject *prefix##_cancelled_exc; \ + PyObject *prefix##_awaited_by; \ fut_state prefix##_state; \ - /* These bitfields need to be at the end of the struct - so that these and bitfields from TaskObj are contiguous. + /* Used by profilers to make traversing the stack from an external \ + process faster. */ \ + char prefix##_is_task; \ + char prefix##_awaited_by_is_set; \ + /* These bitfields need to be at the end of the struct \ + so that these and bitfields from TaskObj are contiguous. \ */ \ unsigned prefix##_log_tb: 1; \ - unsigned prefix##_blocking: 1; + unsigned prefix##_blocking: 1; \ typedef struct { FutureObj_HEAD(fut) @@ -69,12 +75,24 @@ typedef struct { PyObject *sw_arg; } TaskStepMethWrapper; - #define Future_CheckExact(state, obj) Py_IS_TYPE(obj, state->FutureType) #define Task_CheckExact(state, obj) Py_IS_TYPE(obj, state->TaskType) -#define Future_Check(state, obj) PyObject_TypeCheck(obj, state->FutureType) -#define Task_Check(state, obj) PyObject_TypeCheck(obj, state->TaskType) +#define Future_Check(state, obj) \ + (Future_CheckExact(state, obj) \ + || PyObject_TypeCheck(obj, state->FutureType)) + +#define Task_Check(state, obj) \ + (Task_CheckExact(state, obj) \ + || PyObject_TypeCheck(obj, state->TaskType)) + +// This macro is optimized to quickly return for native Future *or* Task +// objects by inlining fast "exact" checks to be called first. 
+#define TaskOrFuture_Check(state, obj) \ + (Task_CheckExact(state, obj) \ + || Future_CheckExact(state, obj) \ + || PyObject_TypeCheck(obj, state->FutureType) \ + || PyObject_TypeCheck(obj, state->TaskType)) #ifdef Py_GIL_DISABLED # define ASYNCIO_STATE_LOCK(state) Py_BEGIN_CRITICAL_SECTION_MUT(&state->mutex) @@ -84,6 +102,37 @@ typedef struct { # define ASYNCIO_STATE_UNLOCK(state) ((void)state) #endif +typedef struct _Py_AsyncioModuleDebugOffsets { + struct _asyncio_task_object { + uint64_t size; + uint64_t task_name; + uint64_t task_awaited_by; + uint64_t task_is_task; + uint64_t task_awaited_by_is_set; + uint64_t task_coro; + } asyncio_task_object; + struct _asyncio_thread_state { + uint64_t size; + uint64_t asyncio_running_loop; + uint64_t asyncio_running_task; + } asyncio_thread_state; +} Py_AsyncioModuleDebugOffsets; + +GENERATE_DEBUG_SECTION(AsyncioDebug, Py_AsyncioModuleDebugOffsets AsyncioDebug) + = {.asyncio_task_object = { + .size = sizeof(TaskObj), + .task_name = offsetof(TaskObj, task_name), + .task_awaited_by = offsetof(TaskObj, task_awaited_by), + .task_is_task = offsetof(TaskObj, task_is_task), + .task_awaited_by_is_set = offsetof(TaskObj, task_awaited_by_is_set), + .task_coro = offsetof(TaskObj, task_coro), + }, + .asyncio_thread_state = { + .size = sizeof(_PyThreadStateImpl), + .asyncio_running_loop = offsetof(_PyThreadStateImpl, asyncio_running_loop), + .asyncio_running_task = offsetof(_PyThreadStateImpl, asyncio_running_task), + }}; + /* State of the _asyncio module */ typedef struct { #ifdef Py_GIL_DISABLED @@ -185,6 +234,22 @@ static PyObject * task_step_handle_result_impl(asyncio_state *state, TaskObj *task, PyObject *result); +static void +clear_task_coro(TaskObj *task) +{ + Py_CLEAR(task->task_coro); +} + + +static void +set_task_coro(TaskObj *task, PyObject *coro) +{ + assert(coro != NULL); + Py_INCREF(coro); + Py_XSETREF(task->task_coro, coro); +} + + static int _is_coroutine(asyncio_state *state, PyObject *coro) { @@ -437,10 +502,13 @@ future_init(FutureObj *fut, PyObject *loop) Py_CLEAR(fut->fut_source_tb); Py_CLEAR(fut->fut_cancel_msg); Py_CLEAR(fut->fut_cancelled_exc); + Py_CLEAR(fut->fut_awaited_by); fut->fut_state = STATE_PENDING; fut->fut_log_tb = 0; fut->fut_blocking = 0; + fut->fut_awaited_by_is_set = 0; + fut->fut_is_task = 0; if (loop == Py_None) { asyncio_state *state = get_asyncio_state_by_def((PyObject *)fut); @@ -480,6 +548,115 @@ future_init(FutureObj *fut, PyObject *loop) return 0; } +static int +future_awaited_by_add(asyncio_state *state, PyObject *fut, PyObject *thing) +{ + if (!TaskOrFuture_Check(state, fut) || !TaskOrFuture_Check(state, thing)) { + // We only want to support native asyncio Futures. + // For further insight see the comment in the Python + // implementation of "future_add_to_awaited_by()". + return 0; + } + + FutureObj *_fut = (FutureObj *)fut; + + /* Most futures/task are only awaited by one entity, so we want + to avoid always creating a set for `fut_awaited_by`. 
+ */ + if (_fut->fut_awaited_by == NULL) { + assert(!_fut->fut_awaited_by_is_set); + Py_INCREF(thing); + _fut->fut_awaited_by = thing; + return 0; + } + + if (_fut->fut_awaited_by_is_set) { + assert(PySet_CheckExact(_fut->fut_awaited_by)); + return PySet_Add(_fut->fut_awaited_by, thing); + } + + PyObject *set = PySet_New(NULL); + if (set == NULL) { + return -1; + } + if (PySet_Add(set, thing)) { + Py_DECREF(set); + return -1; + } + if (PySet_Add(set, _fut->fut_awaited_by)) { + Py_DECREF(set); + return -1; + } + Py_SETREF(_fut->fut_awaited_by, set); + _fut->fut_awaited_by_is_set = 1; + return 0; +} + +static int +future_awaited_by_discard(asyncio_state *state, PyObject *fut, PyObject *thing) +{ + if (!TaskOrFuture_Check(state, fut) || !TaskOrFuture_Check(state, thing)) { + // We only want to support native asyncio Futures. + // For further insight see the comment in the Python + // implementation of "future_add_to_awaited_by()". + return 0; + } + + FutureObj *_fut = (FutureObj *)fut; + + /* Following the semantics of 'set.discard()' here in not + raising an error if `thing` isn't in the `awaited_by` "set". + */ + if (_fut->fut_awaited_by == NULL) { + return 0; + } + if (_fut->fut_awaited_by == thing) { + Py_CLEAR(_fut->fut_awaited_by); + return 0; + } + if (_fut->fut_awaited_by_is_set) { + assert(PySet_CheckExact(_fut->fut_awaited_by)); + int err = PySet_Discard(_fut->fut_awaited_by, thing); + if (err < 0) { + return -1; + } else { + return 0; + } + } + return 0; +} + +/*[clinic input] +@critical_section +@getter +_asyncio.Future._asyncio_awaited_by +[clinic start generated code]*/ + +static PyObject * +_asyncio_Future__asyncio_awaited_by_get_impl(FutureObj *self) +/*[clinic end generated code: output=932af76d385d2e2a input=64c1783df2d44d2b]*/ +{ + /* Implementation of a Python getter. */ + if (self->fut_awaited_by == NULL) { + Py_RETURN_NONE; + } + if (self->fut_awaited_by_is_set) { + /* Already a set, just wrap it into a frozen set and return. 
*/ + assert(PySet_CheckExact(self->fut_awaited_by)); + return PyFrozenSet_New(self->fut_awaited_by); + } + + PyObject *set = PyFrozenSet_New(NULL); + if (set == NULL) { + return NULL; + } + if (PySet_Add(set, self->fut_awaited_by)) { + Py_DECREF(set); + return NULL; + } + return set; +} + static PyObject * future_set_result(asyncio_state *state, FutureObj *fut, PyObject *res) { @@ -780,6 +957,8 @@ FutureObj_clear(FutureObj *fut) Py_CLEAR(fut->fut_source_tb); Py_CLEAR(fut->fut_cancel_msg); Py_CLEAR(fut->fut_cancelled_exc); + Py_CLEAR(fut->fut_awaited_by); + fut->fut_awaited_by_is_set = 0; PyObject_ClearManagedDict((PyObject *)fut); return 0; } @@ -798,6 +977,7 @@ FutureObj_traverse(FutureObj *fut, visitproc visit, void *arg) Py_VISIT(fut->fut_source_tb); Py_VISIT(fut->fut_cancel_msg); Py_VISIT(fut->fut_cancelled_exc); + Py_VISIT(fut->fut_awaited_by); PyObject_VisitManagedDict((PyObject *)fut, visit, arg); return 0; } @@ -1535,7 +1715,8 @@ FutureObj_finalize(FutureObj *fut) if (func != NULL) { PyObject *res = PyObject_CallOneArg(func, context); if (res == NULL) { - PyErr_WriteUnraisable(func); + PyErr_FormatUnraisable("Exception ignored while calling asyncio " + "function %R", func); } else { Py_DECREF(res); @@ -1577,6 +1758,7 @@ static PyGetSetDef FutureType_getsetlist[] = { _ASYNCIO_FUTURE__LOG_TRACEBACK_GETSETDEF _ASYNCIO_FUTURE__SOURCE_TRACEBACK_GETSETDEF _ASYNCIO_FUTURE__CANCEL_MESSAGE_GETSETDEF + _ASYNCIO_FUTURE__ASYNCIO_AWAITED_BY_GETSETDEF {NULL} /* Sentinel */ }; @@ -1883,6 +2065,8 @@ static int task_call_step_soon(asyncio_state *state, TaskObj *, PyObject *); static PyObject * task_wakeup(TaskObj *, PyObject *); static PyObject * task_step(asyncio_state *, TaskObj *, PyObject *); static int task_eager_start(asyncio_state *state, TaskObj *task); +static inline void clear_ts_asyncio_running_task(PyObject *loop); +static inline void set_ts_asyncio_running_task(PyObject *loop, PyObject *task); /* ----- Task._step wrapper */ @@ -2053,7 +2237,10 @@ enter_task(asyncio_state *state, PyObject *loop, PyObject *task) Py_DECREF(item); return -1; } - Py_DECREF(item); + + assert(task == item); + Py_CLEAR(item); + set_ts_asyncio_running_task(loop, task); return 0; } @@ -2078,7 +2265,6 @@ leave_task_predicate(PyObject *item, void *task) static int leave_task(asyncio_state *state, PyObject *loop, PyObject *task) -/*[clinic end generated code: output=0ebf6db4b858fb41 input=51296a46313d1ad8]*/ { int res = _PyDict_DelItemIf(state->current_tasks, loop, leave_task_predicate, task); @@ -2086,6 +2272,7 @@ leave_task(asyncio_state *state, PyObject *loop, PyObject *task) // task was not found return err_leave_task(Py_None, task); } + clear_ts_asyncio_running_task(loop); return res; } @@ -2112,6 +2299,7 @@ swap_current_task(asyncio_state *state, PyObject *loop, PyObject *task) { PyObject *prev_task; + clear_ts_asyncio_running_task(loop); if (task == Py_None) { if (PyDict_Pop(state->current_tasks, loop, &prev_task) < 0) { return NULL; @@ -2131,9 +2319,63 @@ swap_current_task(asyncio_state *state, PyObject *loop, PyObject *task) Py_BEGIN_CRITICAL_SECTION(current_tasks); prev_task = swap_current_task_lock_held(current_tasks, loop, hash, task); Py_END_CRITICAL_SECTION(); + set_ts_asyncio_running_task(loop, task); return prev_task; } +static inline void +set_ts_asyncio_running_task(PyObject *loop, PyObject *task) +{ + // We want to enable debuggers and profilers to be able to quickly + // introspect the asyncio running state from another process. 
+ // When we do that, we need to essentially traverse the address space + // of a Python process and understand what every Python thread in it is + // currently doing, mainly: + // + // * current frame + // * current asyncio task + // + // A naive solution would be to require profilers and debuggers to + // find the current task in the "_asynciomodule" module state, but + // unfortunately that would require a lot of complicated remote + // memory reads and logic, as Python's dict is a notoriously complex + // and ever-changing data structure. + // + // So the easier solution is to put a strong reference to the currently + // running `asyncio.Task` on the current thread state (the current loop + // is also stored there.) + _PyThreadStateImpl *ts = (_PyThreadStateImpl *)_PyThreadState_GET(); + if (ts->asyncio_running_loop == loop) { + // Protect from a situation when someone calls this method + // from another thread. This shouldn't ever happen though, + // as `enter_task` and `leave_task` can either be called by: + // + // - `asyncio.Task` itself, in `Task.__step()`. That method + // can only be called by the event loop itself. + // + // - third-party Task "from scratch" implementations, that + // our `capture_call_graph` API doesn't support anyway. + // + // That said, we still want to make sure we don't end up in + // a broken state, so we check that we're in the correct thread + // by comparing the *loop* argument to the event loop running + // in the current thread. If they match we know we're in the + // right thread, as asyncio event loops don't change threads. + assert(ts->asyncio_running_task == NULL); + ts->asyncio_running_task = Py_NewRef(task); + } +} + +static inline void +clear_ts_asyncio_running_task(PyObject *loop) +{ + // See comment in set_ts_asyncio_running_task() for details. + _PyThreadStateImpl *ts = (_PyThreadStateImpl *)_PyThreadState_GET(); + if (ts->asyncio_running_loop == NULL || ts->asyncio_running_loop == loop) { + Py_CLEAR(ts->asyncio_running_task); + } +} + /* ----- Task */ /*[clinic input] @@ -2158,6 +2400,7 @@ _asyncio_Task___init___impl(TaskObj *self, PyObject *coro, PyObject *loop, if (future_init((FutureObj*)self, loop)) { return -1; } + self->task_is_task = 1; asyncio_state *state = get_asyncio_state_by_def((PyObject *)self); int is_coro = is_coroutine(state, coro); @@ -2185,8 +2428,7 @@ _asyncio_Task___init___impl(TaskObj *self, PyObject *coro, PyObject *loop, self->task_must_cancel = 0; self->task_log_destroy_pending = 1; self->task_num_cancels_requested = 0; - Py_INCREF(coro); - Py_XSETREF(self->task_coro, coro); + set_task_coro(self, coro); if (name == Py_None) { // optimization: defer task name formatting @@ -2226,6 +2468,11 @@ _asyncio_Task___init___impl(TaskObj *self, PyObject *coro, PyObject *loop, if (task_call_step_soon(state, self, NULL)) { return -1; } +#ifdef Py_GIL_DISABLED + // This is required so that _Py_TryIncref(self) + // works correctly in non-owning threads. 
+ _PyObject_SetMaybeWeakref((PyObject *)self); +#endif register_task(state, self); return 0; } @@ -2234,8 +2481,8 @@ static int TaskObj_clear(TaskObj *task) { (void)FutureObj_clear((FutureObj*) task); + clear_task_coro(task); Py_CLEAR(task->task_context); - Py_CLEAR(task->task_coro); Py_CLEAR(task->task_name); Py_CLEAR(task->task_fut_waiter); return 0; @@ -2260,6 +2507,7 @@ TaskObj_traverse(TaskObj *task, visitproc visit, void *arg) Py_VISIT(fut->fut_source_tb); Py_VISIT(fut->fut_cancel_msg); Py_VISIT(fut->fut_cancelled_exc); + Py_VISIT(fut->fut_awaited_by); PyObject_VisitManagedDict((PyObject *)fut, visit, arg); return 0; } @@ -2731,7 +2979,8 @@ TaskObj_finalize(TaskObj *task) if (func != NULL) { PyObject *res = PyObject_CallOneArg(func, context); if (res == NULL) { - PyErr_WriteUnraisable(func); + PyErr_FormatUnraisable("Exception ignored while calling asyncio " + "function %R", func); } else { Py_DECREF(res); @@ -3050,6 +3299,10 @@ task_step_handle_result_impl(asyncio_state *state, TaskObj *task, PyObject *resu goto yield_insteadof_yf; } + if (future_awaited_by_add(state, result, (PyObject *)task)) { + goto fail; + } + fut->fut_blocking = 0; /* result.add_done_callback(task._wakeup) */ @@ -3139,6 +3392,10 @@ task_step_handle_result_impl(asyncio_state *state, TaskObj *task, PyObject *resu goto yield_insteadof_yf; } + if (future_awaited_by_add(state, result, (PyObject *)task)) { + goto fail; + } + /* result._asyncio_future_blocking = False */ if (PyObject_SetAttr( result, &_Py_ID(_asyncio_future_blocking), Py_False) == -1) { @@ -3335,7 +3592,7 @@ task_eager_start(asyncio_state *state, TaskObj *task) register_task(state, task); } else { // This seems to really help performance on pyperformance benchmarks - Py_CLEAR(task->task_coro); + clear_task_coro(task); } return retval; @@ -3350,6 +3607,11 @@ task_wakeup_lock_held(TaskObj *task, PyObject *o) assert(o); asyncio_state *state = get_asyncio_state_by_def((PyObject *)task); + + if (future_awaited_by_discard(state, o, (PyObject *)task)) { + return NULL; + } + if (Future_CheckExact(state, o) || Task_CheckExact(state, o)) { PyObject *fut_result = NULL; int res; @@ -3772,11 +4034,20 @@ _asyncio_all_tasks_impl(PyObject *module, PyObject *loop) llist_for_each_safe(node, &state->asyncio_tasks_head) { TaskObj *task = llist_data(node, TaskObj, task_node); - if (PyList_Append(tasks, (PyObject *)task) < 0) { - Py_DECREF(tasks); - Py_DECREF(loop); - err = 1; - break; + // The linked list holds borrowed references to task + // as such it is possible that the task is concurrently + // deallocated while added to this list. + // To protect against concurrent deallocations, + // we first try to incref the task which would fail + // if it is concurrently getting deallocated in another thread, + // otherwise it gets added to the list. + if (_Py_TryIncref((PyObject *)task)) { + if (_PyList_AppendTakeRef((PyListObject *)tasks, (PyObject *)task) < 0) { + Py_DECREF(tasks); + Py_DECREF(loop); + err = 1; + break; + } } } ASYNCIO_STATE_UNLOCK(state); @@ -3824,6 +4095,50 @@ _asyncio_all_tasks_impl(PyObject *module, PyObject *loop) return res; } +/*[clinic input] +_asyncio.future_add_to_awaited_by + + fut: object + waiter: object + / + +Record that `fut` is awaited on by `waiter`. 
+ +[clinic start generated code]*/ + +static PyObject * +_asyncio_future_add_to_awaited_by_impl(PyObject *module, PyObject *fut, + PyObject *waiter) +/*[clinic end generated code: output=0ab9a1a63389e4df input=06e6eaac51f532b9]*/ +{ + asyncio_state *state = get_asyncio_state(module); + if (future_awaited_by_add(state, fut, waiter)) { + return NULL; + } + Py_RETURN_NONE; +} + +/*[clinic input] +_asyncio.future_discard_from_awaited_by + + fut: object + waiter: object + / + +[clinic start generated code]*/ + +static PyObject * +_asyncio_future_discard_from_awaited_by_impl(PyObject *module, PyObject *fut, + PyObject *waiter) +/*[clinic end generated code: output=a03b0b4323b779de input=3833f7639e88e483]*/ +{ + asyncio_state *state = get_asyncio_state(module); + if (future_awaited_by_discard(state, fut, waiter)) { + return NULL; + } + Py_RETURN_NONE; +} + static int module_traverse(PyObject *mod, visitproc visit, void *arg) { @@ -3887,6 +4202,7 @@ module_clear(PyObject *mod) // those get cleared in PyThreadState_Clear. _PyThreadStateImpl *ts = (_PyThreadStateImpl *)_PyThreadState_GET(); Py_CLEAR(ts->asyncio_running_loop); + Py_CLEAR(ts->asyncio_running_task); return 0; } @@ -3917,7 +4233,6 @@ module_init(asyncio_state *state) goto fail; } - state->context_kwname = Py_BuildValue("(s)", "context"); if (state->context_kwname == NULL) { goto fail; @@ -3998,6 +4313,8 @@ static PyMethodDef asyncio_methods[] = { _ASYNCIO__LEAVE_TASK_METHODDEF _ASYNCIO__SWAP_CURRENT_TASK_METHODDEF _ASYNCIO_ALL_TASKS_METHODDEF + _ASYNCIO_FUTURE_ADD_TO_AWAITED_BY_METHODDEF + _ASYNCIO_FUTURE_DISCARD_FROM_AWAITED_BY_METHODDEF {NULL, NULL} }; diff --git a/Modules/_bz2module.c b/Modules/_bz2module.c index 661847ad26702e..9e85e0de42cd8d 100644 --- a/Modules/_bz2module.c +++ b/Modules/_bz2module.c @@ -129,6 +129,9 @@ typedef struct { PyThread_type_lock lock; } BZ2Decompressor; +#define _BZ2Compressor_CAST(op) ((BZ2Compressor *)(op)) +#define _BZ2Decompressor_CAST(op) ((BZ2Decompressor *)(op)) + /* Helper functions. 
*/ static int @@ -376,8 +379,9 @@ _bz2_BZ2Compressor_impl(PyTypeObject *type, int compresslevel) } static void -BZ2Compressor_dealloc(BZ2Compressor *self) +BZ2Compressor_dealloc(PyObject *op) { + BZ2Compressor *self = _BZ2Compressor_CAST(op); BZ2_bzCompressEnd(&self->bzs); if (self->lock != NULL) { PyThread_free_lock(self->lock); @@ -388,7 +392,7 @@ BZ2Compressor_dealloc(BZ2Compressor *self) } static int -BZ2Compressor_traverse(BZ2Compressor *self, visitproc visit, void *arg) +BZ2Compressor_traverse(PyObject *self, visitproc visit, void *arg) { Py_VISIT(Py_TYPE(self)); return 0; @@ -680,8 +684,10 @@ _bz2_BZ2Decompressor_impl(PyTypeObject *type) } static void -BZ2Decompressor_dealloc(BZ2Decompressor *self) +BZ2Decompressor_dealloc(PyObject *op) { + BZ2Decompressor *self = _BZ2Decompressor_CAST(op); + if(self->input_buffer != NULL) { PyMem_Free(self->input_buffer); } @@ -697,7 +703,7 @@ BZ2Decompressor_dealloc(BZ2Decompressor *self) } static int -BZ2Decompressor_traverse(BZ2Decompressor *self, visitproc visit, void *arg) +BZ2Decompressor_traverse(PyObject *self, visitproc visit, void *arg) { Py_VISIT(Py_TYPE(self)); return 0; diff --git a/Modules/_csv.c b/Modules/_csv.c index 7ca30e39e00c0c..e5ae853590bf2c 100644 --- a/Modules/_csv.c +++ b/Modules/_csv.c @@ -1138,7 +1138,7 @@ join_append_data(WriterObj *self, int field_kind, const void *field_data, int copy_phase) { DialectObj *dialect = self->dialect; - int i; + Py_ssize_t i; Py_ssize_t rec_len; #define INCLEN \ diff --git a/Modules/_ctypes/_ctypes.c b/Modules/_ctypes/_ctypes.c index ede95bdf98bf76..7c0ac1a57f534c 100644 --- a/Modules/_ctypes/_ctypes.c +++ b/Modules/_ctypes/_ctypes.c @@ -146,9 +146,12 @@ typedef struct { PyObject *dict; } DictRemoverObject; +#define _DictRemoverObject_CAST(op) ((DictRemoverObject *)(op)) + static int -_DictRemover_traverse(DictRemoverObject *self, visitproc visit, void *arg) +_DictRemover_traverse(PyObject *myself, visitproc visit, void *arg) { + DictRemoverObject *self = _DictRemoverObject_CAST(myself); Py_VISIT(Py_TYPE(self)); Py_VISIT(self->key); Py_VISIT(self->dict); @@ -156,8 +159,9 @@ _DictRemover_traverse(DictRemoverObject *self, visitproc visit, void *arg) } static int -_DictRemover_clear(DictRemoverObject *self) +_DictRemover_clear(PyObject *myself) { + DictRemoverObject *self = _DictRemoverObject_CAST(myself); Py_CLEAR(self->key); Py_CLEAR(self->dict); return 0; @@ -167,9 +171,8 @@ static void _DictRemover_dealloc(PyObject *myself) { PyTypeObject *tp = Py_TYPE(myself); - DictRemoverObject *self = (DictRemoverObject *)myself; PyObject_GC_UnTrack(myself); - (void)_DictRemover_clear(self); + (void)_DictRemover_clear(myself); tp->tp_free(myself); Py_DECREF(tp); } @@ -177,10 +180,11 @@ _DictRemover_dealloc(PyObject *myself) static PyObject * _DictRemover_call(PyObject *myself, PyObject *args, PyObject *kw) { - DictRemoverObject *self = (DictRemoverObject *)myself; + DictRemoverObject *self = _DictRemoverObject_CAST(myself); if (self->key && self->dict) { if (-1 == PyDict_DelItem(self->dict, self->key)) { - PyErr_FormatUnraisable("Exception ignored on calling _ctypes.DictRemover"); + PyErr_FormatUnraisable("Exception ignored while " + "calling _ctypes.DictRemover"); } Py_CLEAR(self->key); Py_CLEAR(self->dict); @@ -402,16 +406,19 @@ typedef struct { PyObject *keep; // If set, a reference to the original CDataObject. 
} StructParamObject; +#define _StructParamObject_CAST(op) ((StructParamObject *)(op)) + static int -StructParam_traverse(StructParamObject *self, visitproc visit, void *arg) +StructParam_traverse(PyObject *self, visitproc visit, void *arg) { Py_VISIT(Py_TYPE(self)); return 0; } static int -StructParam_clear(StructParamObject *self) +StructParam_clear(PyObject *myself) { + StructParamObject *self = _StructParamObject_CAST(myself); Py_CLEAR(self->keep); return 0; } @@ -419,10 +426,10 @@ StructParam_clear(StructParamObject *self) static void StructParam_dealloc(PyObject *myself) { - StructParamObject *self = (StructParamObject *)myself; + StructParamObject *self = _StructParamObject_CAST(myself); PyTypeObject *tp = Py_TYPE(self); PyObject_GC_UnTrack(myself); - (void)StructParam_clear(self); + (void)StructParam_clear(myself); PyMem_Free(self->ptr); tp->tp_free(myself); Py_DECREF(tp); @@ -457,7 +464,8 @@ CType_Type_traverse(PyObject *self, visitproc visit, void *arg) { StgInfo *info = _PyStgInfo_FromType_NoState(self); if (!info) { - PyErr_WriteUnraisable(self); + PyErr_FormatUnraisable("Exception ignored while " + "calling ctypes traverse function %R", self); } if (info) { Py_VISIT(info->proto); @@ -488,7 +496,8 @@ CType_Type_clear(PyObject *self) { StgInfo *info = _PyStgInfo_FromType_NoState(self); if (!info) { - PyErr_WriteUnraisable(self); + PyErr_FormatUnraisable("Exception ignored while " + "clearing ctypes %R", self); } if (info) { ctype_clear_stginfo(info); @@ -501,7 +510,8 @@ CType_Type_dealloc(PyObject *self) { StgInfo *info = _PyStgInfo_FromType_NoState(self); if (!info) { - PyErr_WriteUnraisable(NULL); // NULL avoids segfault here + PyErr_FormatUnraisable("Exception ignored while " + "deallocating ctypes %R", self); } if (info) { PyMem_Free(info->ffi_type_pointer.elements); @@ -598,7 +608,7 @@ StructUnionType_paramfunc(ctypes_state *st, CDataObject *self) if (ptr == NULL) { return NULL; } - memcpy(ptr, self->b_ptr, self->b_size); + locked_memcpy_from(ptr, self, self->b_size); /* Create a Python object which calls PyMem_Free(ptr) in its deallocator. 
The object will be destroyed @@ -675,7 +685,7 @@ StructUnionType_init(PyObject *self, PyObject *args, PyObject *kwds, int isStruc info->paramfunc = StructUnionType_paramfunc; - if (PyDict_GetItemRef((PyObject *)attrdict, &_Py_ID(_fields_), &fields) < 0) { + if (PyDict_GetItemRef(attrdict, &_Py_ID(_fields_), &fields) < 0) { Py_DECREF(attrdict); return -1; } @@ -907,8 +917,7 @@ CDataType_from_buffer_copy_impl(PyObject *type, PyTypeObject *cls, result = generic_pycdata_new(st, (PyTypeObject *)type, NULL, NULL); if (result != NULL) { - memcpy(((CDataObject *)result)->b_ptr, - (char *)buffer->buf + offset, info->size); + locked_memcpy_to((CDataObject *) result, (char *)buffer->buf + offset, info->size); } return result; } @@ -1195,7 +1204,7 @@ PyCPointerType_paramfunc(ctypes_state *st, CDataObject *self) parg->tag = 'P'; parg->pffi_type = &ffi_type_pointer; parg->obj = Py_NewRef(self); - parg->value.p = *(void **)self->b_ptr; + parg->value.p = locked_deref(self); return parg; } @@ -1412,11 +1421,12 @@ static PyType_Spec pycpointer_type_spec = { */ static int -CharArray_set_raw(CDataObject *self, PyObject *value, void *Py_UNUSED(ignored)) +CharArray_set_raw(PyObject *op, PyObject *value, void *Py_UNUSED(ignored)) { char *ptr; Py_ssize_t size; Py_buffer view; + CDataObject *self = _CDataObject_CAST(op); if (value == NULL) { PyErr_SetString(PyExc_AttributeError, "cannot delete attribute"); @@ -1432,7 +1442,7 @@ CharArray_set_raw(CDataObject *self, PyObject *value, void *Py_UNUSED(ignored)) goto fail; } - memcpy(self->b_ptr, ptr, size); + locked_memcpy_to(self, ptr, size); PyBuffer_Release(&view); return 0; @@ -1442,27 +1452,38 @@ CharArray_set_raw(CDataObject *self, PyObject *value, void *Py_UNUSED(ignored)) } static PyObject * -CharArray_get_raw(CDataObject *self, void *Py_UNUSED(ignored)) +CharArray_get_raw(PyObject *op, void *Py_UNUSED(ignored)) { - return PyBytes_FromStringAndSize(self->b_ptr, self->b_size); + PyObject *res; + CDataObject *self = _CDataObject_CAST(op); + LOCK_PTR(self); + res = PyBytes_FromStringAndSize(self->b_ptr, self->b_size); + UNLOCK_PTR(self); + return res; } static PyObject * -CharArray_get_value(CDataObject *self, void *Py_UNUSED(ignored)) +CharArray_get_value(PyObject *op, void *Py_UNUSED(ignored)) { Py_ssize_t i; + PyObject *res; + CDataObject *self = _CDataObject_CAST(op); + LOCK_PTR(self); char *ptr = self->b_ptr; for (i = 0; i < self->b_size; ++i) if (*ptr++ == '\0') break; - return PyBytes_FromStringAndSize(self->b_ptr, i); + res = PyBytes_FromStringAndSize(self->b_ptr, i); + UNLOCK_PTR(self); + return res; } static int -CharArray_set_value(CDataObject *self, PyObject *value, void *Py_UNUSED(ignored)) +CharArray_set_value(PyObject *op, PyObject *value, void *Py_UNUSED(ignored)) { const char *ptr; Py_ssize_t size; + CDataObject *self = _CDataObject_CAST(op); if (value == NULL) { PyErr_SetString(PyExc_TypeError, @@ -1486,36 +1507,43 @@ CharArray_set_value(CDataObject *self, PyObject *value, void *Py_UNUSED(ignored) } ptr = PyBytes_AS_STRING(value); + LOCK_PTR(self); memcpy(self->b_ptr, ptr, size); if (size < self->b_size) self->b_ptr[size] = '\0'; + UNLOCK_PTR(self); Py_DECREF(value); return 0; } static PyGetSetDef CharArray_getsets[] = { - { "raw", (getter)CharArray_get_raw, (setter)CharArray_set_raw, - "value", NULL }, - { "value", (getter)CharArray_get_value, (setter)CharArray_set_value, - "string value"}, + { "raw", CharArray_get_raw, CharArray_set_raw, "value", NULL }, + { "value", CharArray_get_value, CharArray_set_value, "string value" }, { NULL, NULL } }; 
static PyObject * -WCharArray_get_value(CDataObject *self, void *Py_UNUSED(ignored)) +WCharArray_get_value(PyObject *op, void *Py_UNUSED(ignored)) { Py_ssize_t i; + PyObject *res; + CDataObject *self = _CDataObject_CAST(op); wchar_t *ptr = (wchar_t *)self->b_ptr; + LOCK_PTR(self); for (i = 0; i < self->b_size/(Py_ssize_t)sizeof(wchar_t); ++i) if (*ptr++ == (wchar_t)0) break; - return PyUnicode_FromWideChar((wchar_t *)self->b_ptr, i); + res = PyUnicode_FromWideChar((wchar_t *)self->b_ptr, i); + UNLOCK_PTR(self); + return res; } static int -WCharArray_set_value(CDataObject *self, PyObject *value, void *Py_UNUSED(ignored)) +WCharArray_set_value(PyObject *op, PyObject *value, void *Py_UNUSED(ignored)) { + CDataObject *self = _CDataObject_CAST(op); + if (value == NULL) { PyErr_SetString(PyExc_TypeError, "can't delete attribute"); @@ -1540,15 +1568,15 @@ WCharArray_set_value(CDataObject *self, PyObject *value, void *Py_UNUSED(ignored PyErr_SetString(PyExc_ValueError, "string too long"); return -1; } - if (PyUnicode_AsWideChar(value, (wchar_t *)self->b_ptr, size) < 0) { - return -1; - } - return 0; + Py_ssize_t rc; + LOCK_PTR(self); + rc = PyUnicode_AsWideChar(value, (wchar_t *)self->b_ptr, size); + UNLOCK_PTR(self); + return rc < 0 ? -1 : 0; } static PyGetSetDef WCharArray_getsets[] = { - { "value", (getter)WCharArray_get_value, (setter)WCharArray_set_value, - "string value"}, + { "value", WCharArray_get_value, WCharArray_set_value, "string value" }, { NULL, NULL } }; @@ -1762,11 +1790,6 @@ class _ctypes.c_void_p "PyObject *" "clinic_state_sub()->PyCSimpleType_Type" [clinic start generated code]*/ /*[clinic end generated code: output=da39a3ee5e6b4b0d input=dd4d9646c56f43a9]*/ -#if defined(Py_HAVE_C_COMPLEX) && defined(Py_FFI_SUPPORT_C_COMPLEX) -static const char SIMPLE_TYPE_CHARS[] = "cbBhHiIlLdCEFfuzZqQPXOv?g"; -#else -static const char SIMPLE_TYPE_CHARS[] = "cbBhHiIlLdfuzZqQPXOv?g"; -#endif /*[clinic input] _ctypes.c_wchar_p.from_param as c_wchar_p_from_param @@ -2053,6 +2076,7 @@ c_void_p_from_param_impl(PyObject *type, PyTypeObject *cls, PyObject *value) parg->pffi_type = &ffi_type_pointer; parg->tag = 'P'; Py_INCREF(value); + // Function pointers don't change their contents, no need to lock parg->value.p = *(void **)func->b_ptr; parg->obj = value; return (PyObject *)parg; @@ -2079,7 +2103,7 @@ c_void_p_from_param_impl(PyObject *type, PyTypeObject *cls, PyObject *value) parg->tag = 'Z'; parg->obj = Py_NewRef(value); /* Remember: b_ptr points to where the pointer is stored! 
*/ - parg->value.p = *(void **)(((CDataObject *)value)->b_ptr); + parg->value.p = locked_deref((CDataObject *)value); return (PyObject *)parg; } } @@ -2196,7 +2220,7 @@ PyCSimpleType_paramfunc(ctypes_state *st, CDataObject *self) parg->tag = fmt[0]; parg->pffi_type = fd->pffi_type; parg->obj = Py_NewRef(self); - memcpy(&parg->value, self->b_ptr, self->b_size); + locked_memcpy_from(&parg->value, self, self->b_size); return parg; } @@ -2237,17 +2261,13 @@ PyCSimpleType_init(PyObject *self, PyObject *args, PyObject *kwds) "which must be a string of length 1"); goto error; } - if (!strchr(SIMPLE_TYPE_CHARS, *proto_str)) { + fmt = _ctypes_get_fielddesc(proto_str); + if (!fmt) { PyErr_Format(PyExc_AttributeError, "class must define a '_type_' attribute which must be\n" - "a single character string containing one of '%s'.", - SIMPLE_TYPE_CHARS); - goto error; - } - fmt = _ctypes_get_fielddesc(proto_str); - if (fmt == NULL) { - PyErr_Format(PyExc_ValueError, - "_type_ '%s' not supported", proto_str); + "a single character string containing one of the\n" + "supported types: '%s'.", + _ctypes_get_simple_type_chars()); goto error; } @@ -2621,7 +2641,7 @@ make_funcptrtype_dict(ctypes_state *st, PyObject *attrdict, StgInfo *stginfo) stginfo->getfunc = NULL; stginfo->ffi_type_pointer = ffi_type_pointer; - if (PyDict_GetItemRef((PyObject *)attrdict, &_Py_ID(_flags_), &ob) < 0) { + if (PyDict_GetItemRef(attrdict, &_Py_ID(_flags_), &ob) < 0) { return -1; } if (!ob || !PyLong_Check(ob)) { @@ -2634,7 +2654,7 @@ make_funcptrtype_dict(ctypes_state *st, PyObject *attrdict, StgInfo *stginfo) Py_DECREF(ob); /* _argtypes_ is optional... */ - if (PyDict_GetItemRef((PyObject *)attrdict, &_Py_ID(_argtypes_), &ob) < 0) { + if (PyDict_GetItemRef(attrdict, &_Py_ID(_argtypes_), &ob) < 0) { return -1; } if (ob) { @@ -2647,7 +2667,7 @@ make_funcptrtype_dict(ctypes_state *st, PyObject *attrdict, StgInfo *stginfo) stginfo->converters = converters; } - if (PyDict_GetItemRef((PyObject *)attrdict, &_Py_ID(_restype_), &ob) < 0) { + if (PyDict_GetItemRef(attrdict, &_Py_ID(_restype_), &ob) < 0) { return -1; } if (ob) { @@ -2669,7 +2689,7 @@ make_funcptrtype_dict(ctypes_state *st, PyObject *attrdict, StgInfo *stginfo) } } /* XXX later, maybe. 
- if (PyDict_GetItemRef((PyObject *)attrdict, &_Py _ID(_errcheck_), &ob) < 0) { + if (PyDict_GetItemRef(attrdict, &_Py _ID(_errcheck_), &ob) < 0) { return -1; } if (ob) { @@ -2697,7 +2717,7 @@ PyCFuncPtrType_paramfunc(ctypes_state *st, CDataObject *self) parg->tag = 'P'; parg->pffi_type = &ffi_type_pointer; parg->obj = Py_NewRef(self); - parg->value.p = *(void **)self->b_ptr; + parg->value.p = locked_deref(self); return parg; } @@ -2871,8 +2891,9 @@ class _ctypes.PyCData "PyObject *" "clinic_state()->PyCData_Type" static int -PyCData_traverse(CDataObject *self, visitproc visit, void *arg) +PyCData_traverse(PyObject *op, visitproc visit, void *arg) { + CDataObject *self = _CDataObject_CAST(op); Py_VISIT(self->b_objects); Py_VISIT((PyObject *)self->b_base); PyTypeObject *type = Py_TYPE(self); @@ -2881,8 +2902,9 @@ PyCData_traverse(CDataObject *self, visitproc visit, void *arg) } static int -PyCData_clear(CDataObject *self) +PyCData_clear(PyObject *op) { + CDataObject *self = _CDataObject_CAST(op); Py_CLEAR(self->b_objects); if ((self->b_needsfree) && _CDataObject_HasExternalBuffer(self)) @@ -2897,7 +2919,7 @@ PyCData_dealloc(PyObject *self) { PyTypeObject *type = Py_TYPE(self); PyObject_GC_UnTrack(self); - PyCData_clear((CDataObject *)self); + (void)PyCData_clear(self); type->tp_free(self); Py_DECREF(type); } @@ -2940,7 +2962,7 @@ PyCData_item_type(ctypes_state *st, PyObject *type) static int PyCData_NewGetBuffer(PyObject *myself, Py_buffer *view, int flags) { - CDataObject *self = (CDataObject *)myself; + CDataObject *self = _CDataObject_CAST(myself); ctypes_state *st = get_module_state_by_def(Py_TYPE(Py_TYPE(myself))); StgInfo *info; @@ -2999,7 +3021,7 @@ static PyObject * PyCData_reduce_impl(PyObject *myself, PyTypeObject *cls) /*[clinic end generated code: output=1a025ccfdd8c935d input=34097a5226ea63c1]*/ { - CDataObject *self = (CDataObject *)myself; + CDataObject *self = _CDataObject_CAST(myself); ctypes_state *st = get_module_state_by_class(cls); StgInfo *info; @@ -3017,8 +3039,12 @@ PyCData_reduce_impl(PyObject *myself, PyTypeObject *cls) if (dict == NULL) { return NULL; } + PyObject *bytes; + LOCK_PTR(self); + bytes = PyBytes_FromStringAndSize(self->b_ptr, self->b_size); + UNLOCK_PTR(self); return Py_BuildValue("O(O(NN))", st->_unpickle, Py_TYPE(myself), dict, - PyBytes_FromStringAndSize(self->b_ptr, self->b_size)); + bytes); } static PyObject * @@ -3028,7 +3054,7 @@ PyCData_setstate(PyObject *myself, PyObject *args) Py_ssize_t len; int res; PyObject *dict, *mydict; - CDataObject *self = (CDataObject *)myself; + CDataObject *self = _CDataObject_CAST(myself); if (!PyArg_ParseTuple(args, "O!s#", &PyDict_Type, &dict, &data, &len)) { @@ -3036,7 +3062,10 @@ PyCData_setstate(PyObject *myself, PyObject *args) } if (len > self->b_size) len = self->b_size; + // XXX Can we use locked_memcpy_to()? + LOCK_PTR(self); memmove(self->b_ptr, data, len); + UNLOCK_PTR(self); mydict = PyObject_GetAttrString(myself, "__dict__"); if (mydict == NULL) { return NULL; @@ -3094,6 +3123,12 @@ static PyType_Spec pycdata_spec = { static int PyCData_MallocBuffer(CDataObject *obj, StgInfo *info) { + /* We don't have to lock in this function, because it's only + * used in constructors and therefore does not have concurrent + * access. 
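The traverse/clear/dealloc conversions above change the receiver type from CDataObject * to PyObject * and downcast inside the body, which lets the slot and getset tables drop their (getter)/(setter)/(inquiry) function-pointer casts. A minimal sketch of the pattern; the cast macro body is an assumption (the real _CDataObject_CAST may additionally assert the type):

#define CDataObject_CAST_SKETCH(op)  ((CDataObject *)(op))

/* Matches the tp_clear slot signature exactly, so no cast is needed
   when the function is placed in the PyType_Slot table. */
static int
example_clear(PyObject *op)
{
    CDataObject *self = CDataObject_CAST_SKETCH(op);
    Py_CLEAR(self->b_objects);
    return 0;
}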
+ */ + assert (Py_REFCNT(obj) == 1); + if ((size_t)info->size <= sizeof(obj->b_value)) { /* No need to call malloc, can use the default buffer */ obj->b_ptr = (char *)&obj->b_value; @@ -3219,15 +3254,25 @@ PyObject * PyCData_get(ctypes_state *st, PyObject *type, GETFUNC getfunc, PyObject *src, Py_ssize_t index, Py_ssize_t size, char *adr) { - if (getfunc) - return getfunc(adr, size); + CDataObject *cdata = _CDataObject_CAST(src); + if (getfunc) { + PyObject *res; + LOCK_PTR(cdata); + res = getfunc(adr, size); + UNLOCK_PTR(cdata); + return res; + } assert(type); StgInfo *info; if (PyStgInfo_FromType(st, type, &info) < 0) { return NULL; } if (info && info->getfunc && !_ctypes_simple_instance(st, type)) { - return info->getfunc(adr, size); + PyObject *res; + LOCK_PTR(cdata); + res = info->getfunc(adr, size); + UNLOCK_PTR(cdata); + return res; } return PyCData_FromBaseObj(st, type, src, index, adr); } @@ -3244,15 +3289,24 @@ _PyCData_set(ctypes_state *st, int err; if (setfunc) { - return setfunc(ptr, value, size); + PyObject *res; + LOCK_PTR(dst); + res = setfunc(ptr, value, size); + UNLOCK_PTR(dst); + return res; } if (!CDataObject_Check(st, value)) { StgInfo *info; if (PyStgInfo_FromType(st, type, &info) < 0) { return NULL; } - if (info && info->setfunc) - return info->setfunc(ptr, value, size); + if (info && info->setfunc) { + PyObject *res; + LOCK_PTR(dst); + res = info->setfunc(ptr, value, size); + UNLOCK_PTR(dst); + return res; + } /* If value is a tuple, we try to call the type with the tuple and use the result! @@ -3272,7 +3326,9 @@ _PyCData_set(ctypes_state *st, Py_DECREF(ob); return result; } else if (value == Py_None && PyCPointerTypeObject_Check(st, type)) { + LOCK_PTR(dst); *(void **)ptr = NULL; + UNLOCK_PTR(dst); Py_RETURN_NONE; } else { PyErr_Format(PyExc_TypeError, @@ -3288,9 +3344,7 @@ _PyCData_set(ctypes_state *st, if (err == -1) return NULL; if (err) { - memcpy(ptr, - src->b_ptr, - size); + locked_memcpy_from(ptr, src, size); if (PyCPointerTypeObject_Check(st, type)) { /* XXX */ @@ -3324,7 +3378,9 @@ _PyCData_set(ctypes_state *st, ((PyTypeObject *)type)->tp_name); return NULL; } + LOCK_PTR(src); *(void **)ptr = src->b_ptr; + UNLOCK_PTR(src); keep = GetKeepedObjects(src); if (keep == NULL) @@ -3891,6 +3947,8 @@ PyCFuncPtr_FromDll(PyTypeObject *type, PyObject *args, PyObject *kwds) self->paramflags = Py_XNewRef(paramflags); + // No other threads can have this object, no need to + // lock it. 
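The two comments above make the same argument: a buffer belonging to an object that is still being constructed cannot be reached from another thread, so plain stores are safe there. A sketch of how that invariant is documented with an assertion, as the MallocBuffer hunk does (illustrative only):

static int
init_fresh_buffer(CDataObject *obj, void *address)
{
    /* Not yet published to any other thread: a single reference,
       held by the constructor, so no LOCK_PTR() is required. */
    assert(Py_REFCNT(obj) == 1);
    *(void **)obj->b_ptr = address;
    return 0;
}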
*(void **)self->b_ptr = address; Py_INCREF(dll); Py_DECREF(ftuple); @@ -4349,7 +4407,7 @@ _build_result(PyObject *result, PyObject *callargs, } static PyObject * -PyCFuncPtr_call(PyCFuncPtrObject *self, PyObject *inargs, PyObject *kwds) +PyCFuncPtr_call(PyObject *op, PyObject *inargs, PyObject *kwds) { PyObject *restype; PyObject *converters; @@ -4362,6 +4420,7 @@ PyCFuncPtr_call(PyCFuncPtrObject *self, PyObject *inargs, PyObject *kwds) IUnknown *piunk = NULL; #endif void *pProc = NULL; + PyCFuncPtrObject *self = _PyCFuncPtrObject_CAST(op); int inoutmask; int outmask; @@ -4369,7 +4428,7 @@ PyCFuncPtr_call(PyCFuncPtrObject *self, PyObject *inargs, PyObject *kwds) ctypes_state *st = get_module_state_by_def(Py_TYPE(Py_TYPE(self))); StgInfo *info; - if (PyStgInfo_FromObject(st, (PyObject *)self, &info) < 0) { + if (PyStgInfo_FromObject(st, op, &info) < 0) { return NULL; } assert(info); /* Cannot be NULL for PyCFuncPtrObject instances */ @@ -4487,8 +4546,9 @@ PyCFuncPtr_call(PyCFuncPtrObject *self, PyObject *inargs, PyObject *kwds) } static int -PyCFuncPtr_traverse(PyCFuncPtrObject *self, visitproc visit, void *arg) +PyCFuncPtr_traverse(PyObject *op, visitproc visit, void *arg) { + PyCFuncPtrObject *self = _PyCFuncPtrObject_CAST(op); Py_VISIT(self->callable); Py_VISIT(self->restype); Py_VISIT(self->checker); @@ -4497,12 +4557,13 @@ PyCFuncPtr_traverse(PyCFuncPtrObject *self, visitproc visit, void *arg) Py_VISIT(self->converters); Py_VISIT(self->paramflags); Py_VISIT(self->thunk); - return PyCData_traverse((CDataObject *)self, visit, arg); + return PyCData_traverse(op, visit, arg); } static int -PyCFuncPtr_clear(PyCFuncPtrObject *self) +PyCFuncPtr_clear(PyObject *op) { + PyCFuncPtrObject *self = _PyCFuncPtrObject_CAST(op); Py_CLEAR(self->callable); Py_CLEAR(self->restype); Py_CLEAR(self->checker); @@ -4511,22 +4572,23 @@ PyCFuncPtr_clear(PyCFuncPtrObject *self) Py_CLEAR(self->converters); Py_CLEAR(self->paramflags); Py_CLEAR(self->thunk); - return PyCData_clear((CDataObject *)self); + return PyCData_clear(op); } static void -PyCFuncPtr_dealloc(PyCFuncPtrObject *self) +PyCFuncPtr_dealloc(PyObject *self) { PyObject_GC_UnTrack(self); - PyCFuncPtr_clear(self); + (void)PyCFuncPtr_clear(self); PyTypeObject *type = Py_TYPE(self); - type->tp_free((PyObject *)self); + type->tp_free(self); Py_DECREF(type); } static PyObject * -PyCFuncPtr_repr(PyCFuncPtrObject *self) +PyCFuncPtr_repr(PyObject *op) { + PyCFuncPtrObject *self = _PyCFuncPtrObject_CAST(op); #ifdef MS_WIN32 if (self->index) return PyUnicode_FromFormat("", @@ -4540,8 +4602,9 @@ PyCFuncPtr_repr(PyCFuncPtrObject *self) } static int -PyCFuncPtr_bool(PyCFuncPtrObject *self) +PyCFuncPtr_bool(PyObject *op) { + PyCFuncPtrObject *self = _PyCFuncPtrObject_CAST(op); return ((*(void **)self->b_ptr != NULL) #ifdef MS_WIN32 || (self->index != 0) @@ -4729,7 +4792,7 @@ static PyType_Spec pycunion_spec = { PyCArray_Type */ static int -Array_init(CDataObject *self, PyObject *args, PyObject *kw) +Array_init(PyObject *self, PyObject *args, PyObject *kw) { Py_ssize_t i; Py_ssize_t n; @@ -4743,7 +4806,7 @@ Array_init(CDataObject *self, PyObject *args, PyObject *kw) for (i = 0; i < n; ++i) { PyObject *v; v = PyTuple_GET_ITEM(args, i); - if (-1 == PySequence_SetItem((PyObject *)self, i, v)) + if (-1 == PySequence_SetItem(self, i, v)) return -1; } return 0; @@ -4752,7 +4815,7 @@ Array_init(CDataObject *self, PyObject *args, PyObject *kw) static PyObject * Array_item(PyObject *myself, Py_ssize_t index) { - CDataObject *self = (CDataObject *)myself; + CDataObject *self = 
_CDataObject_CAST(myself); Py_ssize_t offset, size; if (index < 0 || index >= self->b_length) { @@ -4763,7 +4826,7 @@ Array_item(PyObject *myself, Py_ssize_t index) ctypes_state *st = get_module_state_by_def(Py_TYPE(Py_TYPE(self))); StgInfo *stginfo; - if (PyStgInfo_FromObject(st, (PyObject *)self, &stginfo) < 0) { + if (PyStgInfo_FromObject(st, myself, &stginfo) < 0) { return NULL; } @@ -4773,14 +4836,14 @@ Array_item(PyObject *myself, Py_ssize_t index) size = stginfo->size / stginfo->length; offset = index * size; - return PyCData_get(st, stginfo->proto, stginfo->getfunc, (PyObject *)self, - index, size, self->b_ptr + offset); + return PyCData_get(st, stginfo->proto, stginfo->getfunc, myself, + index, size, self->b_ptr + offset); } static PyObject * Array_subscript(PyObject *myself, PyObject *item) { - CDataObject *self = (CDataObject *)myself; + CDataObject *self = _CDataObject_CAST(myself); if (PyIndex_Check(item)) { Py_ssize_t i = PyNumber_AsSsize_t(item, PyExc_IndexError); @@ -4804,7 +4867,7 @@ Array_subscript(PyObject *myself, PyObject *item) ctypes_state *st = get_module_state_by_def(Py_TYPE(Py_TYPE(self))); StgInfo *stginfo; - if (PyStgInfo_FromObject(st, (PyObject *)self, &stginfo) < 0) { + if (PyStgInfo_FromObject(st, myself, &stginfo) < 0) { return NULL; } assert(stginfo); /* Cannot be NULL for array object instances */ @@ -4823,18 +4886,24 @@ Array_subscript(PyObject *myself, PyObject *item) if (slicelen <= 0) return Py_GetConstant(Py_CONSTANT_EMPTY_BYTES); if (step == 1) { - return PyBytes_FromStringAndSize(ptr + start, - slicelen); + PyObject *res; + LOCK_PTR(self); + res = PyBytes_FromStringAndSize(ptr + start, + slicelen); + UNLOCK_PTR(self); + return res; } dest = (char *)PyMem_Malloc(slicelen); if (dest == NULL) return PyErr_NoMemory(); + LOCK_PTR(self); for (cur = start, i = 0; i < slicelen; cur += step, i++) { dest[i] = ptr[cur]; } + UNLOCK_PTR(self); np = PyBytes_FromStringAndSize(dest, slicelen); PyMem_Free(dest); @@ -4847,8 +4916,12 @@ Array_subscript(PyObject *myself, PyObject *item) if (slicelen <= 0) return Py_GetConstant(Py_CONSTANT_EMPTY_STR); if (step == 1) { - return PyUnicode_FromWideChar(ptr + start, - slicelen); + PyObject *res; + LOCK_PTR(self); + res = PyUnicode_FromWideChar(ptr + start, + slicelen); + UNLOCK_PTR(self); + return res; } dest = PyMem_New(wchar_t, slicelen); @@ -4857,10 +4930,12 @@ Array_subscript(PyObject *myself, PyObject *item) return NULL; } + LOCK_PTR(self); for (cur = start, i = 0; i < slicelen; cur += step, i++) { dest[i] = ptr[cur]; } + UNLOCK_PTR(self); np = PyUnicode_FromWideChar(dest, slicelen); PyMem_Free(dest); @@ -4893,7 +4968,7 @@ Array_subscript(PyObject *myself, PyObject *item) static int Array_ass_item(PyObject *myself, Py_ssize_t index, PyObject *value) { - CDataObject *self = (CDataObject *)myself; + CDataObject *self = _CDataObject_CAST(myself); Py_ssize_t size, offset; char *ptr; @@ -4905,7 +4980,7 @@ Array_ass_item(PyObject *myself, Py_ssize_t index, PyObject *value) ctypes_state *st = get_module_state_by_def(Py_TYPE(Py_TYPE(self))); StgInfo *stginfo; - if (PyStgInfo_FromObject(st, (PyObject *)self, &stginfo) < 0) { + if (PyStgInfo_FromObject(st, myself, &stginfo) < 0) { return -1; } assert(stginfo); /* Cannot be NULL for array object instances */ @@ -4919,14 +4994,14 @@ Array_ass_item(PyObject *myself, Py_ssize_t index, PyObject *value) offset = index * size; ptr = self->b_ptr + offset; - return PyCData_set(st, (PyObject *)self, stginfo->proto, stginfo->setfunc, value, - index, size, ptr); + return PyCData_set(st, 
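For strided slices the hunks above allocate the destination before taking the lock, copy element by element while holding it, and build the bytes/str result only after releasing it. A standalone sketch of that ordering (pthread mutex standing in for the per-object lock):

#include <pthread.h>
#include <stdlib.h>

static char *
copy_strided(pthread_mutex_t *lock, const char *src,
             size_t start, size_t step, size_t count)
{
    char *dest = malloc(count ? count : 1);   /* allocate outside the lock */
    if (dest == NULL) {
        return NULL;                          /* caller raises MemoryError */
    }
    pthread_mutex_lock(lock);                 /* LOCK_PTR(self)   */
    size_t cur = start;
    for (size_t i = 0; i < count; cur += step, i++) {
        dest[i] = src[cur];
    }
    pthread_mutex_unlock(lock);               /* UNLOCK_PTR(self) */
    return dest;                              /* result object built afterwards */
}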
myself, stginfo->proto, stginfo->setfunc, value, + index, size, ptr); } static int Array_ass_subscript(PyObject *myself, PyObject *item, PyObject *value) { - CDataObject *self = (CDataObject *)myself; + CDataObject *self = _CDataObject_CAST(myself); if (value == NULL) { PyErr_SetString(PyExc_TypeError, @@ -4983,7 +5058,7 @@ Array_ass_subscript(PyObject *myself, PyObject *item, PyObject *value) static Py_ssize_t Array_length(PyObject *myself) { - CDataObject *self = (CDataObject *)myself; + CDataObject *self = _CDataObject_CAST(myself); return self->b_length; } @@ -5100,9 +5175,10 @@ class _ctypes.Simple "PyObject *" "clinic_state()->Simple_Type" /*[clinic end generated code: output=da39a3ee5e6b4b0d input=016c476c7aa8b8a8]*/ static int -Simple_set_value(CDataObject *self, PyObject *value, void *Py_UNUSED(ignored)) +Simple_set_value(PyObject *op, PyObject *value, void *Py_UNUSED(ignored)) { PyObject *result; + CDataObject *self = _CDataObject_CAST(op); if (value == NULL) { PyErr_SetString(PyExc_TypeError, @@ -5112,13 +5188,15 @@ Simple_set_value(CDataObject *self, PyObject *value, void *Py_UNUSED(ignored)) ctypes_state *st = get_module_state_by_def(Py_TYPE(Py_TYPE(self))); StgInfo *info; - if (PyStgInfo_FromObject(st, (PyObject *)self, &info) < 0) { + if (PyStgInfo_FromObject(st, op, &info) < 0) { return -1; } assert(info); /* Cannot be NULL for CDataObject instances */ assert(info->setfunc); + LOCK_PTR(self); result = info->setfunc(self->b_ptr, value, info->size); + UNLOCK_PTR(self); if (!result) return -1; @@ -5127,7 +5205,7 @@ Simple_set_value(CDataObject *self, PyObject *value, void *Py_UNUSED(ignored)) } static int -Simple_init(CDataObject *self, PyObject *args, PyObject *kw) +Simple_init(PyObject *self, PyObject *args, PyObject *kw) { PyObject *value = NULL; if (!PyArg_UnpackTuple(args, "__init__", 0, 1, &value)) @@ -5138,20 +5216,25 @@ Simple_init(CDataObject *self, PyObject *args, PyObject *kw) } static PyObject * -Simple_get_value(CDataObject *self, void *Py_UNUSED(ignored)) +Simple_get_value(PyObject *op, void *Py_UNUSED(ignored)) { + CDataObject *self = _CDataObject_CAST(op); ctypes_state *st = get_module_state_by_def(Py_TYPE(Py_TYPE(self))); StgInfo *info; - if (PyStgInfo_FromObject(st, (PyObject *)self, &info) < 0) { + if (PyStgInfo_FromObject(st, op, &info) < 0) { return NULL; } assert(info); /* Cannot be NULL for CDataObject instances */ assert(info->getfunc); - return info->getfunc(self->b_ptr, self->b_size); + PyObject *res; + LOCK_PTR(self); + res = info->getfunc(self->b_ptr, self->b_size); + UNLOCK_PTR(self); + return res; } static PyGetSetDef Simple_getsets[] = { - { "value", (getter)Simple_get_value, (setter)Simple_set_value, + { "value", Simple_get_value, Simple_set_value, "current value", NULL }, { NULL, NULL } }; @@ -5173,7 +5256,7 @@ Simple_from_outparm_impl(PyObject *self, PyTypeObject *cls) return Py_NewRef(self); } /* call stginfo->getfunc */ - return Simple_get_value((CDataObject *)self, NULL); + return Simple_get_value(self, NULL); } static PyMethodDef Simple_methods[] = { @@ -5181,14 +5264,20 @@ static PyMethodDef Simple_methods[] = { { NULL, NULL }, }; -static int Simple_bool(CDataObject *self) +static int +Simple_bool(PyObject *op) { - return memcmp(self->b_ptr, "\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0", self->b_size); + int cmp; + CDataObject *self = _CDataObject_CAST(op); + LOCK_PTR(self); + cmp = memcmp(self->b_ptr, "\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0", self->b_size); + UNLOCK_PTR(self); + return cmp; } /* "%s(%s)" % (self.__class__.__name__, self.value) */ static 
PyObject * -Simple_repr(CDataObject *self) +Simple_repr(PyObject *self) { PyObject *val, *result; ctypes_state *st = get_module_state_by_def(Py_TYPE(Py_TYPE(self))); @@ -5235,12 +5324,13 @@ static PyType_Spec pycsimple_spec = { static PyObject * Pointer_item(PyObject *myself, Py_ssize_t index) { - CDataObject *self = (CDataObject *)myself; + CDataObject *self = _CDataObject_CAST(myself); Py_ssize_t size; Py_ssize_t offset; PyObject *proto; + void *deref = locked_deref(self); - if (*(void **)self->b_ptr == NULL) { + if (deref == NULL) { PyErr_SetString(PyExc_ValueError, "NULL pointer access"); return NULL; @@ -5248,7 +5338,7 @@ Pointer_item(PyObject *myself, Py_ssize_t index) ctypes_state *st = get_module_state_by_def(Py_TYPE(Py_TYPE(myself))); StgInfo *stginfo; - if (PyStgInfo_FromObject(st, (PyObject *)self, &stginfo) < 0) { + if (PyStgInfo_FromObject(st, myself, &stginfo) < 0) { return NULL; } assert(stginfo); /* Cannot be NULL for pointer object instances */ @@ -5266,14 +5356,14 @@ Pointer_item(PyObject *myself, Py_ssize_t index) size = iteminfo->size; offset = index * iteminfo->size; - return PyCData_get(st, proto, stginfo->getfunc, (PyObject *)self, - index, size, (*(char **)self->b_ptr) + offset); + return PyCData_get(st, proto, stginfo->getfunc, myself, + index, size, (char *)((char *)deref + offset)); } static int Pointer_ass_item(PyObject *myself, Py_ssize_t index, PyObject *value) { - CDataObject *self = (CDataObject *)myself; + CDataObject *self = _CDataObject_CAST(myself); Py_ssize_t size; Py_ssize_t offset; PyObject *proto; @@ -5284,7 +5374,8 @@ Pointer_ass_item(PyObject *myself, Py_ssize_t index, PyObject *value) return -1; } - if (*(void **)self->b_ptr == NULL) { + void *deref = locked_deref(self); + if (deref == NULL) { PyErr_SetString(PyExc_ValueError, "NULL pointer access"); return -1; @@ -5292,7 +5383,7 @@ Pointer_ass_item(PyObject *myself, Py_ssize_t index, PyObject *value) ctypes_state *st = get_module_state_by_def(Py_TYPE(Py_TYPE(myself))); StgInfo *stginfo; - if (PyStgInfo_FromObject(st, (PyObject *)self, &stginfo) < 0) { + if (PyStgInfo_FromObject(st, myself, &stginfo) < 0) { return -1; } assert(stginfo); /* Cannot be NULL for pointer instances */ @@ -5310,14 +5401,15 @@ Pointer_ass_item(PyObject *myself, Py_ssize_t index, PyObject *value) size = iteminfo->size; offset = index * iteminfo->size; - return PyCData_set(st, (PyObject *)self, proto, stginfo->setfunc, value, - index, size, (*(char **)self->b_ptr) + offset); + return PyCData_set(st, myself, proto, stginfo->setfunc, value, + index, size, ((char *)deref + offset)); } static PyObject * -Pointer_get_contents(CDataObject *self, void *closure) +Pointer_get_contents(PyObject *self, void *closure) { - if (*(void **)self->b_ptr == NULL) { + void *deref = locked_deref(_CDataObject_CAST(self)); + if (deref == NULL) { PyErr_SetString(PyExc_ValueError, "NULL pointer access"); return NULL; @@ -5325,21 +5417,20 @@ Pointer_get_contents(CDataObject *self, void *closure) ctypes_state *st = get_module_state_by_def(Py_TYPE(Py_TYPE(self))); StgInfo *stginfo; - if (PyStgInfo_FromObject(st, (PyObject *)self, &stginfo) < 0) { + if (PyStgInfo_FromObject(st, self, &stginfo) < 0) { return NULL; } assert(stginfo); /* Cannot be NULL for pointer instances */ - return PyCData_FromBaseObj(st, stginfo->proto, - (PyObject *)self, 0, - *(void **)self->b_ptr); + return PyCData_FromBaseObj(st, stginfo->proto, self, 0, deref); } static int -Pointer_set_contents(CDataObject *self, PyObject *value, void *closure) +Pointer_set_contents(PyObject 
*op, PyObject *value, void *closure) { CDataObject *dst; PyObject *keep; + CDataObject *self = _CDataObject_CAST(op); if (value == NULL) { PyErr_SetString(PyExc_TypeError, @@ -5348,7 +5439,7 @@ Pointer_set_contents(CDataObject *self, PyObject *value, void *closure) } ctypes_state *st = get_module_state_by_def(Py_TYPE(Py_TYPE(self))); StgInfo *stginfo; - if (PyStgInfo_FromObject(st, (PyObject *)self, &stginfo) < 0) { + if (PyStgInfo_FromObject(st, op, &stginfo) < 0) { return -1; } assert(stginfo); /* Cannot be NULL for pointer instances */ @@ -5367,7 +5458,7 @@ Pointer_set_contents(CDataObject *self, PyObject *value, void *closure) } dst = (CDataObject *)value; - *(void **)self->b_ptr = dst->b_ptr; + locked_deref_assign(self, dst->b_ptr); /* A Pointer instance must keep the value it points to alive. So, a @@ -5387,17 +5478,15 @@ Pointer_set_contents(CDataObject *self, PyObject *value, void *closure) } static PyGetSetDef Pointer_getsets[] = { - { "contents", (getter)Pointer_get_contents, - (setter)Pointer_set_contents, + { "contents", Pointer_get_contents, Pointer_set_contents, "the object this pointer points to (read-write)", NULL }, { NULL, NULL } }; static int -Pointer_init(CDataObject *self, PyObject *args, PyObject *kw) +Pointer_init(PyObject *self, PyObject *args, PyObject *kw) { PyObject *value = NULL; - if (!PyArg_UnpackTuple(args, "POINTER", 0, 1, &value)) return -1; if (value == NULL) @@ -5424,7 +5513,7 @@ Pointer_new(PyTypeObject *type, PyObject *args, PyObject *kw) static PyObject * Pointer_subscript(PyObject *myself, PyObject *item) { - CDataObject *self = (CDataObject *)myself; + CDataObject *self = _CDataObject_CAST(myself); if (PyIndex_Check(item)) { Py_ssize_t i = PyNumber_AsSsize_t(item, PyExc_IndexError); if (i == -1 && PyErr_Occurred()) @@ -5490,7 +5579,7 @@ Pointer_subscript(PyObject *myself, PyObject *item) ctypes_state *st = get_module_state_by_def(Py_TYPE(Py_TYPE(myself))); StgInfo *stginfo; - if (PyStgInfo_FromObject(st, (PyObject *)self, &stginfo) < 0) { + if (PyStgInfo_FromObject(st, myself, &stginfo) < 0) { return NULL; } assert(stginfo); /* Cannot be NULL for pointer instances */ @@ -5502,41 +5591,53 @@ Pointer_subscript(PyObject *myself, PyObject *item) } assert(iteminfo); if (iteminfo->getfunc == _ctypes_get_fielddesc("c")->getfunc) { - char *ptr = *(char **)self->b_ptr; + char *ptr = locked_deref(self); char *dest; if (len <= 0) return Py_GetConstant(Py_CONSTANT_EMPTY_BYTES); if (step == 1) { - return PyBytes_FromStringAndSize(ptr + start, - len); + PyObject *res; + LOCK_PTR(self); + res = PyBytes_FromStringAndSize(ptr + start, + len); + UNLOCK_PTR(self); + return res; } dest = (char *)PyMem_Malloc(len); if (dest == NULL) return PyErr_NoMemory(); + LOCK_PTR(self); for (cur = start, i = 0; i < len; cur += step, i++) { dest[i] = ptr[cur]; } + UNLOCK_PTR(self); np = PyBytes_FromStringAndSize(dest, len); PyMem_Free(dest); return np; } if (iteminfo->getfunc == _ctypes_get_fielddesc("u")->getfunc) { - wchar_t *ptr = *(wchar_t **)self->b_ptr; + wchar_t *ptr = locked_deref(self); wchar_t *dest; if (len <= 0) return Py_GetConstant(Py_CONSTANT_EMPTY_STR); if (step == 1) { - return PyUnicode_FromWideChar(ptr + start, - len); + PyObject *res; + LOCK_PTR(self); + res = PyUnicode_FromWideChar(ptr + start, + len); + UNLOCK_PTR(self); + return res; } dest = PyMem_New(wchar_t, len); if (dest == NULL) return PyErr_NoMemory(); + LOCK_PTR(self); for (cur = start, i = 0; i < len; cur += step, i++) { dest[i] = ptr[cur]; } + UNLOCK_PTR(self); np = PyUnicode_FromWideChar(dest, len); 
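Pointer_set_contents above now stores the new target through locked_deref_assign() instead of writing *(void **)self->b_ptr directly. As with locked_deref(), the definition is not in this section; a plausible shape, with the name suffixed to mark it as a guess:

static inline void
locked_deref_assign_sketch(CDataObject *self, void *new_ptr)
{
    LOCK_PTR(self);
    *(void **)self->b_ptr = new_ptr;   /* publish the new target under the lock */
    UNLOCK_PTR(self);
}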
PyMem_Free(dest); return np; @@ -5560,13 +5661,13 @@ Pointer_subscript(PyObject *myself, PyObject *item) } static int -Pointer_bool(CDataObject *self) +Pointer_bool(PyObject *self) { - return (*(void **)self->b_ptr != NULL); + return locked_deref(_CDataObject_CAST(self)) != NULL; } static PyType_Slot pycpointer_slots[] = { - {Py_tp_doc, PyDoc_STR("XXX to be provided")}, + {Py_tp_doc, (void *)PyDoc_STR("XXX to be provided")}, {Py_tp_getset, Pointer_getsets}, {Py_tp_init, Pointer_init}, {Py_tp_new, Pointer_new}, @@ -5770,7 +5871,7 @@ cast(void *ptr, PyObject *src, PyObject *ctype) } } /* Should we assert that result is a pointer type? */ - memcpy(result->b_ptr, &ptr, sizeof(void *)); + locked_memcpy_to(result, &ptr, sizeof(void *)); return (PyObject *)result; failed: diff --git a/Modules/_ctypes/_ctypes_test_generated.c.h b/Modules/_ctypes/_ctypes_test_generated.c.h index 46a3e4b01e2259..d70b33eaa8b515 100644 --- a/Modules/_ctypes/_ctypes_test_generated.c.h +++ b/Modules/_ctypes/_ctypes_test_generated.c.h @@ -56,7 +56,8 @@ struct SingleInt { int a; }; - struct SingleInt value = {0}; + struct SingleInt value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("SingleInt")); APPEND(PyLong_FromLong(sizeof(struct SingleInt))); APPEND(PyLong_FromLong(_Alignof(struct SingleInt))); @@ -69,7 +70,8 @@ union SingleInt_Union { int a; }; - union SingleInt_Union value = {0}; + union SingleInt_Union value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("SingleInt_Union")); APPEND(PyLong_FromLong(sizeof(union SingleInt_Union))); APPEND(PyLong_FromLong(_Alignof(union SingleInt_Union))); @@ -82,7 +84,8 @@ struct SingleU32 { uint32_t a; }; - struct SingleU32 value = {0}; + struct SingleU32 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("SingleU32")); APPEND(PyLong_FromLong(sizeof(struct SingleU32))); APPEND(PyLong_FromLong(_Alignof(struct SingleU32))); @@ -97,7 +100,8 @@ int8_t y; uint16_t z; }; - struct SimpleStruct value = {0}; + struct SimpleStruct value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("SimpleStruct")); APPEND(PyLong_FromLong(sizeof(struct SimpleStruct))); APPEND(PyLong_FromLong(_Alignof(struct SimpleStruct))); @@ -114,7 +118,8 @@ int8_t y; uint16_t z; }; - union SimpleUnion value = {0}; + union SimpleUnion value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("SimpleUnion")); APPEND(PyLong_FromLong(sizeof(union SimpleUnion))); APPEND(PyLong_FromLong(_Alignof(union SimpleUnion))); @@ -136,7 +141,8 @@ int64_t i64; uint64_t u64; }; - struct ManyTypes value = {0}; + struct ManyTypes value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("ManyTypes")); APPEND(PyLong_FromLong(sizeof(struct ManyTypes))); APPEND(PyLong_FromLong(_Alignof(struct ManyTypes))); @@ -163,7 +169,8 @@ int64_t i64; uint64_t u64; }; - union ManyTypesU value = {0}; + union ManyTypesU value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("ManyTypesU")); APPEND(PyLong_FromLong(sizeof(union ManyTypesU))); APPEND(PyLong_FromLong(_Alignof(union ManyTypesU))); @@ -197,7 +204,8 @@ uint16_t z; }; }; - struct Nested value = {0}; + struct Nested value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Nested")); APPEND(PyLong_FromLong(sizeof(struct Nested))); APPEND(PyLong_FromLong(_Alignof(struct Nested))); @@ -223,7 +231,8 @@ int64_t b; }; #pragma pack(pop) - struct Packed1 value = {0}; + struct Packed1 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Packed1")); 
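The generated test structures above switch from "= {0}" initializers to an explicit memset(). The observable difference is that aggregate initialization only promises zeroed members, while memset() zeroes every byte of the object, padding included; a standalone illustration:

#include <string.h>
#include <stdint.h>

struct Padded {
    uint8_t  a;      /* typically followed by 3 padding bytes */
    uint32_t b;
};

static void
zero_both_ways(void)
{
    struct Padded v1 = {0};        /* members are zero; padding bytes unspecified */
    struct Padded v2;
    memset(&v2, 0, sizeof(v2));    /* every byte is zero, padding included */
    (void)v1;
    (void)v2;
}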
APPEND(PyLong_FromLong(sizeof(struct Packed1))); APPEND(PyLong_FromLong(_Alignof(struct Packed1))); @@ -247,7 +256,8 @@ int64_t b; }; #pragma pack(pop) - struct Packed2 value = {0}; + struct Packed2 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Packed2")); APPEND(PyLong_FromLong(sizeof(struct Packed2))); APPEND(PyLong_FromLong(_Alignof(struct Packed2))); @@ -271,7 +281,8 @@ int64_t b; }; #pragma pack(pop) - struct Packed3 value = {0}; + struct Packed3 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Packed3")); APPEND(PyLong_FromLong(sizeof(struct Packed3))); APPEND(PyLong_FromLong(_Alignof(struct Packed3))); @@ -295,7 +306,8 @@ int64_t b; }; #pragma pack(pop) - struct Packed4 value = {0}; + struct Packed4 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Packed4")); APPEND(PyLong_FromLong(sizeof(struct Packed4))); APPEND(PyLong_FromLong(_Alignof(struct Packed4))); @@ -316,7 +328,8 @@ int64_t b; int32_t c; }; - struct X86_32EdgeCase value = {0}; + struct X86_32EdgeCase value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("X86_32EdgeCase")); APPEND(PyLong_FromLong(sizeof(struct X86_32EdgeCase))); APPEND(PyLong_FromLong(_Alignof(struct X86_32EdgeCase))); @@ -333,7 +346,8 @@ unsigned int b :5; unsigned int c :7; }; - struct MSBitFieldExample value = {0}; + struct MSBitFieldExample value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("MSBitFieldExample")); APPEND(PyLong_FromLong(sizeof(struct MSBitFieldExample))); APPEND(PyLong_FromLong(_Alignof(struct MSBitFieldExample))); @@ -351,7 +365,8 @@ unsigned int may_straddle :30; unsigned int last :18; }; - struct MSStraddlingExample value = {0}; + struct MSStraddlingExample value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("MSStraddlingExample")); APPEND(PyLong_FromLong(sizeof(struct MSStraddlingExample))); APPEND(PyLong_FromLong(_Alignof(struct MSStraddlingExample))); @@ -375,7 +390,8 @@ int H :8; int I :9; }; - struct IntBits value = {0}; + struct IntBits value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("IntBits")); APPEND(PyLong_FromLong(sizeof(struct IntBits))); APPEND(PyLong_FromLong(_Alignof(struct IntBits))); @@ -413,7 +429,8 @@ short R :6; short S :7; }; - struct Bits value = {0}; + struct Bits value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Bits")); APPEND(PyLong_FromLong(sizeof(struct Bits))); APPEND(PyLong_FromLong(_Alignof(struct Bits))); @@ -456,7 +473,8 @@ int H :8; int I :9; }; - struct IntBits_MSVC value = {0}; + struct IntBits_MSVC value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("IntBits_MSVC")); APPEND(PyLong_FromLong(sizeof(struct IntBits_MSVC))); APPEND(PyLong_FromLong(_Alignof(struct IntBits_MSVC))); @@ -499,7 +517,8 @@ short R :6; short S :7; }; - struct Bits_MSVC value = {0}; + struct Bits_MSVC value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Bits_MSVC")); APPEND(PyLong_FromLong(sizeof(struct Bits_MSVC))); APPEND(PyLong_FromLong(_Alignof(struct Bits_MSVC))); @@ -536,7 +555,8 @@ int64_t b :62; int64_t c :1; }; - struct I64Bits value = {0}; + struct I64Bits value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("I64Bits")); APPEND(PyLong_FromLong(sizeof(struct I64Bits))); APPEND(PyLong_FromLong(_Alignof(struct I64Bits))); @@ -560,7 +580,8 @@ uint64_t b :62; uint64_t c :1; }; - struct U64Bits value = {0}; + struct U64Bits value; + memset(&value, 0, sizeof(value)); 
APPEND(PyUnicode_FromString("U64Bits")); APPEND(PyLong_FromLong(sizeof(struct U64Bits))); APPEND(PyLong_FromLong(_Alignof(struct U64Bits))); @@ -584,7 +605,8 @@ int8_t b :3; int8_t c :1; }; - struct Struct331_8 value = {0}; + struct Struct331_8 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct331_8")); APPEND(PyLong_FromLong(sizeof(struct Struct331_8))); APPEND(PyLong_FromLong(_Alignof(struct Struct331_8))); @@ -608,7 +630,8 @@ int8_t b :6; int8_t c :1; }; - struct Struct1x1_8 value = {0}; + struct Struct1x1_8 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct1x1_8")); APPEND(PyLong_FromLong(sizeof(struct Struct1x1_8))); APPEND(PyLong_FromLong(_Alignof(struct Struct1x1_8))); @@ -633,7 +656,8 @@ int8_t b :6; int8_t c :1; }; - struct Struct1nx1_8 value = {0}; + struct Struct1nx1_8 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct1nx1_8")); APPEND(PyLong_FromLong(sizeof(struct Struct1nx1_8))); APPEND(PyLong_FromLong(_Alignof(struct Struct1nx1_8))); @@ -658,7 +682,8 @@ int8_t b :6; int8_t c :6; }; - struct Struct3xx_8 value = {0}; + struct Struct3xx_8 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct3xx_8")); APPEND(PyLong_FromLong(sizeof(struct Struct3xx_8))); APPEND(PyLong_FromLong(_Alignof(struct Struct3xx_8))); @@ -682,7 +707,8 @@ uint8_t b :3; uint8_t c :1; }; - struct Struct331_u8 value = {0}; + struct Struct331_u8 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct331_u8")); APPEND(PyLong_FromLong(sizeof(struct Struct331_u8))); APPEND(PyLong_FromLong(_Alignof(struct Struct331_u8))); @@ -706,7 +732,8 @@ uint8_t b :6; uint8_t c :1; }; - struct Struct1x1_u8 value = {0}; + struct Struct1x1_u8 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct1x1_u8")); APPEND(PyLong_FromLong(sizeof(struct Struct1x1_u8))); APPEND(PyLong_FromLong(_Alignof(struct Struct1x1_u8))); @@ -731,7 +758,8 @@ uint8_t b :6; uint8_t c :1; }; - struct Struct1nx1_u8 value = {0}; + struct Struct1nx1_u8 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct1nx1_u8")); APPEND(PyLong_FromLong(sizeof(struct Struct1nx1_u8))); APPEND(PyLong_FromLong(_Alignof(struct Struct1nx1_u8))); @@ -756,7 +784,8 @@ uint8_t b :6; uint8_t c :6; }; - struct Struct3xx_u8 value = {0}; + struct Struct3xx_u8 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct3xx_u8")); APPEND(PyLong_FromLong(sizeof(struct Struct3xx_u8))); APPEND(PyLong_FromLong(_Alignof(struct Struct3xx_u8))); @@ -780,7 +809,8 @@ int16_t b :3; int16_t c :1; }; - struct Struct331_16 value = {0}; + struct Struct331_16 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct331_16")); APPEND(PyLong_FromLong(sizeof(struct Struct331_16))); APPEND(PyLong_FromLong(_Alignof(struct Struct331_16))); @@ -804,7 +834,8 @@ int16_t b :14; int16_t c :1; }; - struct Struct1x1_16 value = {0}; + struct Struct1x1_16 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct1x1_16")); APPEND(PyLong_FromLong(sizeof(struct Struct1x1_16))); APPEND(PyLong_FromLong(_Alignof(struct Struct1x1_16))); @@ -829,7 +860,8 @@ int16_t b :14; int16_t c :1; }; - struct Struct1nx1_16 value = {0}; + struct Struct1nx1_16 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct1nx1_16")); APPEND(PyLong_FromLong(sizeof(struct Struct1nx1_16))); APPEND(PyLong_FromLong(_Alignof(struct Struct1nx1_16))); @@ -854,7 +886,8 @@ int16_t b :14; int16_t c 
:14; }; - struct Struct3xx_16 value = {0}; + struct Struct3xx_16 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct3xx_16")); APPEND(PyLong_FromLong(sizeof(struct Struct3xx_16))); APPEND(PyLong_FromLong(_Alignof(struct Struct3xx_16))); @@ -878,7 +911,8 @@ uint16_t b :3; uint16_t c :1; }; - struct Struct331_u16 value = {0}; + struct Struct331_u16 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct331_u16")); APPEND(PyLong_FromLong(sizeof(struct Struct331_u16))); APPEND(PyLong_FromLong(_Alignof(struct Struct331_u16))); @@ -902,7 +936,8 @@ uint16_t b :14; uint16_t c :1; }; - struct Struct1x1_u16 value = {0}; + struct Struct1x1_u16 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct1x1_u16")); APPEND(PyLong_FromLong(sizeof(struct Struct1x1_u16))); APPEND(PyLong_FromLong(_Alignof(struct Struct1x1_u16))); @@ -927,7 +962,8 @@ uint16_t b :14; uint16_t c :1; }; - struct Struct1nx1_u16 value = {0}; + struct Struct1nx1_u16 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct1nx1_u16")); APPEND(PyLong_FromLong(sizeof(struct Struct1nx1_u16))); APPEND(PyLong_FromLong(_Alignof(struct Struct1nx1_u16))); @@ -952,7 +988,8 @@ uint16_t b :14; uint16_t c :14; }; - struct Struct3xx_u16 value = {0}; + struct Struct3xx_u16 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct3xx_u16")); APPEND(PyLong_FromLong(sizeof(struct Struct3xx_u16))); APPEND(PyLong_FromLong(_Alignof(struct Struct3xx_u16))); @@ -976,7 +1013,8 @@ int32_t b :3; int32_t c :1; }; - struct Struct331_32 value = {0}; + struct Struct331_32 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct331_32")); APPEND(PyLong_FromLong(sizeof(struct Struct331_32))); APPEND(PyLong_FromLong(_Alignof(struct Struct331_32))); @@ -1000,7 +1038,8 @@ int32_t b :30; int32_t c :1; }; - struct Struct1x1_32 value = {0}; + struct Struct1x1_32 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct1x1_32")); APPEND(PyLong_FromLong(sizeof(struct Struct1x1_32))); APPEND(PyLong_FromLong(_Alignof(struct Struct1x1_32))); @@ -1025,7 +1064,8 @@ int32_t b :30; int32_t c :1; }; - struct Struct1nx1_32 value = {0}; + struct Struct1nx1_32 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct1nx1_32")); APPEND(PyLong_FromLong(sizeof(struct Struct1nx1_32))); APPEND(PyLong_FromLong(_Alignof(struct Struct1nx1_32))); @@ -1050,7 +1090,8 @@ int32_t b :30; int32_t c :30; }; - struct Struct3xx_32 value = {0}; + struct Struct3xx_32 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct3xx_32")); APPEND(PyLong_FromLong(sizeof(struct Struct3xx_32))); APPEND(PyLong_FromLong(_Alignof(struct Struct3xx_32))); @@ -1074,7 +1115,8 @@ uint32_t b :3; uint32_t c :1; }; - struct Struct331_u32 value = {0}; + struct Struct331_u32 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct331_u32")); APPEND(PyLong_FromLong(sizeof(struct Struct331_u32))); APPEND(PyLong_FromLong(_Alignof(struct Struct331_u32))); @@ -1098,7 +1140,8 @@ uint32_t b :30; uint32_t c :1; }; - struct Struct1x1_u32 value = {0}; + struct Struct1x1_u32 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct1x1_u32")); APPEND(PyLong_FromLong(sizeof(struct Struct1x1_u32))); APPEND(PyLong_FromLong(_Alignof(struct Struct1x1_u32))); @@ -1123,7 +1166,8 @@ uint32_t b :30; uint32_t c :1; }; - struct Struct1nx1_u32 value = {0}; + struct Struct1nx1_u32 value; + memset(&value, 0, 
sizeof(value)); APPEND(PyUnicode_FromString("Struct1nx1_u32")); APPEND(PyLong_FromLong(sizeof(struct Struct1nx1_u32))); APPEND(PyLong_FromLong(_Alignof(struct Struct1nx1_u32))); @@ -1148,7 +1192,8 @@ uint32_t b :30; uint32_t c :30; }; - struct Struct3xx_u32 value = {0}; + struct Struct3xx_u32 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct3xx_u32")); APPEND(PyLong_FromLong(sizeof(struct Struct3xx_u32))); APPEND(PyLong_FromLong(_Alignof(struct Struct3xx_u32))); @@ -1172,7 +1217,8 @@ int64_t b :3; int64_t c :1; }; - struct Struct331_64 value = {0}; + struct Struct331_64 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct331_64")); APPEND(PyLong_FromLong(sizeof(struct Struct331_64))); APPEND(PyLong_FromLong(_Alignof(struct Struct331_64))); @@ -1196,7 +1242,8 @@ int64_t b :62; int64_t c :1; }; - struct Struct1x1_64 value = {0}; + struct Struct1x1_64 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct1x1_64")); APPEND(PyLong_FromLong(sizeof(struct Struct1x1_64))); APPEND(PyLong_FromLong(_Alignof(struct Struct1x1_64))); @@ -1221,7 +1268,8 @@ int64_t b :62; int64_t c :1; }; - struct Struct1nx1_64 value = {0}; + struct Struct1nx1_64 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct1nx1_64")); APPEND(PyLong_FromLong(sizeof(struct Struct1nx1_64))); APPEND(PyLong_FromLong(_Alignof(struct Struct1nx1_64))); @@ -1246,7 +1294,8 @@ int64_t b :62; int64_t c :62; }; - struct Struct3xx_64 value = {0}; + struct Struct3xx_64 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct3xx_64")); APPEND(PyLong_FromLong(sizeof(struct Struct3xx_64))); APPEND(PyLong_FromLong(_Alignof(struct Struct3xx_64))); @@ -1270,7 +1319,8 @@ uint64_t b :3; uint64_t c :1; }; - struct Struct331_u64 value = {0}; + struct Struct331_u64 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct331_u64")); APPEND(PyLong_FromLong(sizeof(struct Struct331_u64))); APPEND(PyLong_FromLong(_Alignof(struct Struct331_u64))); @@ -1294,7 +1344,8 @@ uint64_t b :62; uint64_t c :1; }; - struct Struct1x1_u64 value = {0}; + struct Struct1x1_u64 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct1x1_u64")); APPEND(PyLong_FromLong(sizeof(struct Struct1x1_u64))); APPEND(PyLong_FromLong(_Alignof(struct Struct1x1_u64))); @@ -1319,7 +1370,8 @@ uint64_t b :62; uint64_t c :1; }; - struct Struct1nx1_u64 value = {0}; + struct Struct1nx1_u64 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct1nx1_u64")); APPEND(PyLong_FromLong(sizeof(struct Struct1nx1_u64))); APPEND(PyLong_FromLong(_Alignof(struct Struct1nx1_u64))); @@ -1344,7 +1396,8 @@ uint64_t b :62; uint64_t c :62; }; - struct Struct3xx_u64 value = {0}; + struct Struct3xx_u64 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct3xx_u64")); APPEND(PyLong_FromLong(sizeof(struct Struct3xx_u64))); APPEND(PyLong_FromLong(_Alignof(struct Struct3xx_u64))); @@ -1367,7 +1420,8 @@ signed char a :4; int b :4; }; - struct Mixed1 value = {0}; + struct Mixed1 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Mixed1")); APPEND(PyLong_FromLong(sizeof(struct Mixed1))); APPEND(PyLong_FromLong(_Alignof(struct Mixed1))); @@ -1389,7 +1443,8 @@ signed char a :4; int32_t b :32; }; - struct Mixed2 value = {0}; + struct Mixed2 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Mixed2")); APPEND(PyLong_FromLong(sizeof(struct Mixed2))); 
APPEND(PyLong_FromLong(_Alignof(struct Mixed2))); @@ -1411,7 +1466,8 @@ signed char a :4; unsigned char b :4; }; - struct Mixed3 value = {0}; + struct Mixed3 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Mixed3")); APPEND(PyLong_FromLong(sizeof(struct Mixed3))); APPEND(PyLong_FromLong(_Alignof(struct Mixed3))); @@ -1437,7 +1493,8 @@ short e :4; int f :24; }; - struct Mixed4 value = {0}; + struct Mixed4 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Mixed4")); APPEND(PyLong_FromLong(sizeof(struct Mixed4))); APPEND(PyLong_FromLong(_Alignof(struct Mixed4))); @@ -1463,7 +1520,8 @@ unsigned int A :1; unsigned short B :16; }; - struct Mixed5 value = {0}; + struct Mixed5 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Mixed5")); APPEND(PyLong_FromLong(sizeof(struct Mixed5))); APPEND(PyLong_FromLong(_Alignof(struct Mixed5))); @@ -1485,7 +1543,8 @@ unsigned long long A :1; unsigned int B :32; }; - struct Mixed6 value = {0}; + struct Mixed6 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Mixed6")); APPEND(PyLong_FromLong(sizeof(struct Mixed6))); APPEND(PyLong_FromLong(_Alignof(struct Mixed6))); @@ -1508,7 +1567,8 @@ uint32_t B :20; uint64_t C :24; }; - struct Mixed7 value = {0}; + struct Mixed7 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Mixed7")); APPEND(PyLong_FromLong(sizeof(struct Mixed7))); APPEND(PyLong_FromLong(_Alignof(struct Mixed7))); @@ -1532,7 +1592,8 @@ uint32_t B :32; unsigned long long C :1; }; - struct Mixed8_a value = {0}; + struct Mixed8_a value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Mixed8_a")); APPEND(PyLong_FromLong(sizeof(struct Mixed8_a))); APPEND(PyLong_FromLong(_Alignof(struct Mixed8_a))); @@ -1556,7 +1617,8 @@ uint32_t B; unsigned long long C :1; }; - struct Mixed8_b value = {0}; + struct Mixed8_b value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Mixed8_b")); APPEND(PyLong_FromLong(sizeof(struct Mixed8_b))); APPEND(PyLong_FromLong(_Alignof(struct Mixed8_b))); @@ -1579,7 +1641,8 @@ uint8_t A; uint32_t B :1; }; - struct Mixed9 value = {0}; + struct Mixed9 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Mixed9")); APPEND(PyLong_FromLong(sizeof(struct Mixed9))); APPEND(PyLong_FromLong(_Alignof(struct Mixed9))); @@ -1601,7 +1664,8 @@ uint32_t A :1; uint64_t B :1; }; - struct Mixed10 value = {0}; + struct Mixed10 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Mixed10")); APPEND(PyLong_FromLong(sizeof(struct Mixed10))); APPEND(PyLong_FromLong(_Alignof(struct Mixed10))); @@ -1623,7 +1687,8 @@ uint32_t A :1; uint64_t B :1; }; - struct Example_gh_95496 value = {0}; + struct Example_gh_95496 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Example_gh_95496")); APPEND(PyLong_FromLong(sizeof(struct Example_gh_95496))); APPEND(PyLong_FromLong(_Alignof(struct Example_gh_95496))); @@ -1655,7 +1720,8 @@ uint16_t b1 :12; }; #pragma pack(pop) - struct Example_gh_84039_bad value = {0}; + struct Example_gh_84039_bad value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Example_gh_84039_bad")); APPEND(PyLong_FromLong(sizeof(struct Example_gh_84039_bad))); APPEND(PyLong_FromLong(_Alignof(struct Example_gh_84039_bad))); @@ -1693,7 +1759,8 @@ uint8_t a7 :1; }; #pragma pack(pop) - struct Example_gh_84039_good_a value = {0}; + struct Example_gh_84039_good_a value; + memset(&value, 0, sizeof(value)); 
APPEND(PyUnicode_FromString("Example_gh_84039_good_a")); APPEND(PyLong_FromLong(sizeof(struct Example_gh_84039_good_a))); APPEND(PyLong_FromLong(_Alignof(struct Example_gh_84039_good_a))); @@ -1735,7 +1802,8 @@ uint16_t b1 :12; }; #pragma pack(pop) - struct Example_gh_84039_good value = {0}; + struct Example_gh_84039_good value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Example_gh_84039_good")); APPEND(PyLong_FromLong(sizeof(struct Example_gh_84039_good))); APPEND(PyLong_FromLong(_Alignof(struct Example_gh_84039_good))); @@ -1775,7 +1843,8 @@ uint32_t R2 :2; }; #pragma pack(pop) - struct Example_gh_73939 value = {0}; + struct Example_gh_73939 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Example_gh_73939")); APPEND(PyLong_FromLong(sizeof(struct Example_gh_73939))); APPEND(PyLong_FromLong(_Alignof(struct Example_gh_73939))); @@ -1806,7 +1875,8 @@ uint8_t b :8; uint32_t c :16; }; - struct Example_gh_86098 value = {0}; + struct Example_gh_86098 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Example_gh_86098")); APPEND(PyLong_FromLong(sizeof(struct Example_gh_86098))); APPEND(PyLong_FromLong(_Alignof(struct Example_gh_86098))); @@ -1832,7 +1902,8 @@ uint32_t c :16; }; #pragma pack(pop) - struct Example_gh_86098_pack value = {0}; + struct Example_gh_86098_pack value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Example_gh_86098_pack")); APPEND(PyLong_FromLong(sizeof(struct Example_gh_86098_pack))); APPEND(PyLong_FromLong(_Alignof(struct Example_gh_86098_pack))); @@ -1858,7 +1929,8 @@ }; signed char y; }; - struct AnonBitfields value = {0}; + struct AnonBitfields value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("AnonBitfields")); APPEND(PyLong_FromLong(sizeof(struct AnonBitfields))); APPEND(PyLong_FromLong(_Alignof(struct AnonBitfields))); diff --git a/Modules/_ctypes/callbacks.c b/Modules/_ctypes/callbacks.c index 89c0749a093765..b84bd25af8ec2c 100644 --- a/Modules/_ctypes/callbacks.c +++ b/Modules/_ctypes/callbacks.c @@ -81,22 +81,6 @@ PyType_Spec cthunk_spec = { /**************************************************************/ -static void -PrintError(const char *msg, ...) -{ - char buf[512]; - PyObject *f = PySys_GetObject("stderr"); - va_list marker; - - va_start(marker, msg); - PyOS_vsnprintf(buf, sizeof(buf), msg, marker); - va_end(marker); - if (f != NULL && f != Py_None) - PyFile_WriteString(buf, f); - PyErr_Print(); -} - - #ifdef MS_WIN32 /* * We must call AddRef() on non-NULL COM pointers we receive as arguments @@ -108,26 +92,23 @@ PrintError(const char *msg, ...) * after checking for PyObject_IsTrue(), but this would probably be somewhat * slower. 
*/ -static void +static int TryAddRef(PyObject *cnv, CDataObject *obj) { IUnknown *punk; PyObject *attrdict = _PyType_GetDict((PyTypeObject *)cnv); if (!attrdict) { - return; + return 0; } int r = PyDict_Contains(attrdict, &_Py_ID(_needs_com_addref_)); if (r <= 0) { - if (r < 0) { - PrintError("getting _needs_com_addref_"); - } - return; + return r; } punk = *(IUnknown **)obj->b_ptr; if (punk) punk->lpVtbl->AddRef(punk); - return; + return 0; } #endif @@ -162,14 +143,13 @@ static void _CallPythonObject(ctypes_state *st, StgInfo *info; if (PyStgInfo_FromType(st, cnv, &info) < 0) { - goto Done; + goto Error; } if (info && info->getfunc && !_ctypes_simple_instance(st, cnv)) { PyObject *v = info->getfunc(*pArgs, info->size); if (!v) { - PrintError("create argument %zd:\n", i); - goto Done; + goto Error; } args[i] = v; /* XXX XXX XX @@ -182,24 +162,25 @@ static void _CallPythonObject(ctypes_state *st, /* Hm, shouldn't we use PyCData_AtAddress() or something like that instead? */ CDataObject *obj = (CDataObject *)_PyObject_CallNoArgs(cnv); if (!obj) { - PrintError("create argument %zd:\n", i); - goto Done; + goto Error; } if (!CDataObject_Check(st, obj)) { + PyErr_Format(PyExc_TypeError, + "%R returned unexpected result of type %T", cnv, obj); Py_DECREF(obj); - PrintError("unexpected result of create argument %zd:\n", i); - goto Done; + goto Error; } memcpy(obj->b_ptr, *pArgs, info->size); args[i] = (PyObject *)obj; #ifdef MS_WIN32 - TryAddRef(cnv, obj); + if (TryAddRef(cnv, obj) < 0) { + goto Error; + } #endif } else { - PyErr_SetString(PyExc_TypeError, - "cannot build parameter"); - PrintError("Parsing argument %zd\n", i); - goto Done; + PyErr_Format(PyExc_TypeError, + "cannot build parameter of type %R", cnv); + goto Error; } /* XXX error handling! */ pArgs++; @@ -207,8 +188,13 @@ static void _CallPythonObject(ctypes_state *st, if (flags & (FUNCFLAG_USE_ERRNO | FUNCFLAG_USE_LASTERROR)) { error_object = _ctypes_get_errobj(st, &space); - if (error_object == NULL) + if (error_object == NULL) { + PyErr_FormatUnraisable( + "Exception ignored while setting error for " + "ctypes callback function %R", + callable); goto Done; + } if (flags & FUNCFLAG_USE_ERRNO) { int temp = space[0]; space[0] = errno; @@ -225,9 +211,9 @@ static void _CallPythonObject(ctypes_state *st, result = PyObject_Vectorcall(callable, args, nargs, NULL); if (result == NULL) { - PyErr_FormatUnraisable( - "Exception ignored on calling ctypes callback function %R", - callable); + PyErr_FormatUnraisable("Exception ignored while " + "calling ctypes callback function %R", + callable); } #ifdef MS_WIN32 @@ -269,7 +255,7 @@ static void _CallPythonObject(ctypes_state *st, if (keep == NULL) { /* Could not convert callback result. 
*/ PyErr_FormatUnraisable( - "Exception ignored on converting result " + "Exception ignored while converting result " "of ctypes callback function %R", callable); } @@ -282,7 +268,7 @@ static void _CallPythonObject(ctypes_state *st, "memory leak in callback function.", 1) == -1) { PyErr_FormatUnraisable( - "Exception ignored on converting result " + "Exception ignored while converting result " "of ctypes callback function %R", callable); } @@ -295,6 +281,14 @@ static void _CallPythonObject(ctypes_state *st, for (j = 0; j < i; j++) { Py_DECREF(args[j]); } + return; + + Error: + PyErr_FormatUnraisable( + "Exception ignored while creating argument %zd for " + "ctypes callback function %R", + i, callable); + goto Done; } static void closure_fcn(ffi_cif *cif, @@ -487,39 +481,31 @@ long Call_GetClassObject(REFCLSID rclsid, REFIID riid, LPVOID *ppv) { PyObject *func, *result; long retval; - static PyObject *context; - if (context == NULL) - context = PyUnicode_InternFromString("_ctypes.DllGetClassObject"); - - func = _PyImport_GetModuleAttrString("ctypes", "DllGetClassObject"); + func = PyImport_ImportModuleAttrString("ctypes", "DllGetClassObject"); if (!func) { - PyErr_WriteUnraisable(context ? context : Py_None); /* There has been a warning before about this already */ - return E_FAIL; + goto error; } { PyObject *py_rclsid = PyLong_FromVoidPtr((void *)rclsid); if (py_rclsid == NULL) { Py_DECREF(func); - PyErr_WriteUnraisable(context ? context : Py_None); - return E_FAIL; + goto error; } PyObject *py_riid = PyLong_FromVoidPtr((void *)riid); if (py_riid == NULL) { Py_DECREF(func); Py_DECREF(py_rclsid); - PyErr_WriteUnraisable(context ? context : Py_None); - return E_FAIL; + goto error; } PyObject *py_ppv = PyLong_FromVoidPtr(ppv); if (py_ppv == NULL) { Py_DECREF(py_rclsid); Py_DECREF(py_riid); Py_DECREF(func); - PyErr_WriteUnraisable(context ? context : Py_None); - return E_FAIL; + goto error; } result = PyObject_CallFunctionObjArgs(func, py_rclsid, @@ -532,17 +518,21 @@ long Call_GetClassObject(REFCLSID rclsid, REFIID riid, LPVOID *ppv) } Py_DECREF(func); if (!result) { - PyErr_WriteUnraisable(context ? context : Py_None); - return E_FAIL; + goto error; } retval = PyLong_AsLong(result); if (PyErr_Occurred()) { - PyErr_WriteUnraisable(context ? context : Py_None); - retval = E_FAIL; + Py_DECREF(result); + goto error; } Py_DECREF(result); return retval; + +error: + PyErr_FormatUnraisable("Exception ignored while calling " + "ctypes.DllGetClassObject"); + return E_FAIL; } STDAPI DllGetClassObject(REFCLSID rclsid, @@ -563,10 +553,6 @@ long Call_CanUnloadNow(void) { PyObject *mod, *func, *result; long retval; - static PyObject *context; - - if (context == NULL) - context = PyUnicode_InternFromString("_ctypes.DllCanUnloadNow"); mod = PyImport_ImportModule("ctypes"); if (!mod) { @@ -580,24 +566,27 @@ long Call_CanUnloadNow(void) func = PyObject_GetAttrString(mod, "DllCanUnloadNow"); Py_DECREF(mod); if (!func) { - PyErr_WriteUnraisable(context ? context : Py_None); - return E_FAIL; + goto error; } result = _PyObject_CallNoArgs(func); Py_DECREF(func); if (!result) { - PyErr_WriteUnraisable(context ? context : Py_None); - return E_FAIL; + goto error; } retval = PyLong_AsLong(result); if (PyErr_Occurred()) { - PyErr_WriteUnraisable(context ? 
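The callbacks.c hunks above drop the PrintError() helper: argument-conversion failures now leave a normal Python exception set and jump to a single Error label, where PyErr_FormatUnraisable() reports it once with the callback named. A minimal sketch of that control flow for a single argument (illustrative, not the patch's exact code):

static void
convert_one_arg_sketch(PyObject *callable, PyObject *converter)
{
    PyObject *arg = PyObject_CallNoArgs(converter);
    if (arg == NULL) {
        goto Error;                 /* exception already set */
    }
    /* ... pass arg on to callable ... */
    Py_DECREF(arg);
    return;

Error:
    PyErr_FormatUnraisable(
        "Exception ignored while creating argument for "
        "ctypes callback function %R", callable);
}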
context : Py_None); - retval = E_FAIL; + Py_DECREF(result); + goto error; } Py_DECREF(result); return retval; + +error: + PyErr_FormatUnraisable("Exception ignored while calling " + "ctypes.DllCanUnloadNow"); + return E_FAIL; } /* diff --git a/Modules/_ctypes/callproc.c b/Modules/_ctypes/callproc.c index 92eedff5ec94f1..c6b6460126ca90 100644 --- a/Modules/_ctypes/callproc.c +++ b/Modules/_ctypes/callproc.c @@ -493,27 +493,29 @@ PyCArgObject_new(ctypes_state *st) } static int -PyCArg_traverse(PyCArgObject *self, visitproc visit, void *arg) +PyCArg_traverse(PyObject *op, visitproc visit, void *arg) { + PyCArgObject *self = _PyCArgObject_CAST(op); Py_VISIT(Py_TYPE(self)); Py_VISIT(self->obj); return 0; } static int -PyCArg_clear(PyCArgObject *self) +PyCArg_clear(PyObject *op) { + PyCArgObject *self = _PyCArgObject_CAST(op); Py_CLEAR(self->obj); return 0; } static void -PyCArg_dealloc(PyCArgObject *self) +PyCArg_dealloc(PyObject *self) { PyTypeObject *tp = Py_TYPE(self); PyObject_GC_UnTrack(self); (void)PyCArg_clear(self); - tp->tp_free((PyObject *)self); + tp->tp_free(self); Py_DECREF(tp); } @@ -524,8 +526,9 @@ is_literal_char(unsigned char c) } static PyObject * -PyCArg_repr(PyCArgObject *self) +PyCArg_repr(PyObject *op) { + PyCArgObject *self = _PyCArgObject_CAST(op); switch(self->tag) { case 'b': case 'B': diff --git a/Modules/_ctypes/cfield.c b/Modules/_ctypes/cfield.c index dcac9da75360a4..9924d62c0881d1 100644 --- a/Modules/_ctypes/cfield.c +++ b/Modules/_ctypes/cfield.c @@ -194,17 +194,18 @@ PyCField_new_impl(PyTypeObject *type, PyObject *name, PyObject *proto, static int -PyCField_set(CFieldObject *self, PyObject *inst, PyObject *value) +PyCField_set(PyObject *op, PyObject *inst, PyObject *value) { CDataObject *dst; char *ptr; + CFieldObject *self = _CFieldObject_CAST(op); ctypes_state *st = get_module_state_by_class(Py_TYPE(self)); if (!CDataObject_Check(st, inst)) { PyErr_SetString(PyExc_TypeError, "not a ctype instance"); return -1; } - dst = (CDataObject *)inst; + dst = _CDataObject_CAST(inst); ptr = dst->b_ptr + self->offset; if (value == NULL) { PyErr_SetString(PyExc_TypeError, @@ -212,13 +213,14 @@ PyCField_set(CFieldObject *self, PyObject *inst, PyObject *value) return -1; } return PyCData_set(st, inst, self->proto, self->setfunc, value, - self->index, self->size, ptr); + self->index, self->size, ptr); } static PyObject * -PyCField_get(CFieldObject *self, PyObject *inst, PyTypeObject *type) +PyCField_get(PyObject *op, PyObject *inst, PyTypeObject *type) { CDataObject *src; + CFieldObject *self = _CFieldObject_CAST(op); if (inst == NULL) { return Py_NewRef(self); } @@ -228,21 +230,21 @@ PyCField_get(CFieldObject *self, PyObject *inst, PyTypeObject *type) "not a ctype instance"); return NULL; } - src = (CDataObject *)inst; + src = _CDataObject_CAST(inst); return PyCData_get(st, self->proto, self->getfunc, inst, - self->index, self->size, src->b_ptr + self->offset); + self->index, self->size, src->b_ptr + self->offset); } static PyObject * PyCField_get_offset(PyObject *self, void *data) { - return PyLong_FromSsize_t(((CFieldObject *)self)->offset); + return PyLong_FromSsize_t(_CFieldObject_CAST(self)->offset); } static PyObject * PyCField_get_size(PyObject *self, void *data) { - return PyLong_FromSsize_t(((CFieldObject *)self)->size); + return PyLong_FromSsize_t(_CFieldObject_CAST(self)->size); } static PyGetSetDef PyCField_getset[] = { @@ -252,17 +254,20 @@ static PyGetSetDef PyCField_getset[] = { }; static int -PyCField_traverse(CFieldObject *self, visitproc visit, void *arg) 
+PyCField_traverse(PyObject *op, visitproc visit, void *arg) { + CFieldObject *self = _CFieldObject_CAST(op); Py_VISIT(Py_TYPE(self)); Py_VISIT(self->proto); return 0; } static int -PyCField_clear(CFieldObject *self) +PyCField_clear(PyObject *op) { + CFieldObject *self = _CFieldObject_CAST(op); Py_CLEAR(self->proto); + Py_CLEAR(self->name); return 0; } @@ -271,17 +276,16 @@ PyCField_dealloc(PyObject *self) { PyTypeObject *tp = Py_TYPE(self); PyObject_GC_UnTrack(self); - CFieldObject *self_cf = (CFieldObject *)self; - (void)PyCField_clear(self_cf); - Py_CLEAR(self_cf->name); + (void)PyCField_clear(self); Py_TYPE(self)->tp_free(self); Py_DECREF(tp); } static PyObject * -PyCField_repr(CFieldObject *self) +PyCField_repr(PyObject *op) { PyObject *result; + CFieldObject *self = _CFieldObject_CAST(op); Py_ssize_t bits = NUM_BITS(self->size); Py_ssize_t size = LOW_BIT(self->size); const char *name; @@ -1255,6 +1259,10 @@ for code in 'sbBcdCEFgfhHiIlLqQPzuUZXvO': // always contains NULLs: struct fielddesc fmt_nil; + + // Result of _ctypes_get_simple_type_chars. Initialized just after + // the rest of formattable, so we stash it here. + char simple_type_chars[26]; }; static struct formattable formattable; @@ -1315,8 +1323,8 @@ _Py_COMP_DIAG_PUSH /* Delayed initialization. Windows cannot statically reference dynamically loaded addresses from DLLs. */ -void -_ctypes_init_fielddesc(void) +static void +_ctypes_init_fielddesc_locked(void) { /* Fixed-width integers */ @@ -1432,9 +1440,11 @@ for base_code, base_c_type in [ TABLE_ENTRY_SW(d, &ffi_type_double); #if defined(Py_HAVE_C_COMPLEX) && defined(Py_FFI_SUPPORT_C_COMPLEX) - TABLE_ENTRY(C, &ffi_type_complex_double); - TABLE_ENTRY(E, &ffi_type_complex_float); - TABLE_ENTRY(F, &ffi_type_complex_longdouble); + if (Py_FFI_COMPLEX_AVAILABLE) { + TABLE_ENTRY(C, &ffi_type_complex_double); + TABLE_ENTRY(E, &ffi_type_complex_float); + TABLE_ENTRY(F, &ffi_type_complex_longdouble); + } #endif TABLE_ENTRY(g, &ffi_type_longdouble); TABLE_ENTRY_SW(f, &ffi_type_float); @@ -1466,21 +1476,75 @@ for base_code, base_c_type in [ formattable.fmt_bool.code = '?'; formattable.fmt_bool.setfunc = bool_set; formattable.fmt_bool.getfunc = bool_get; + +/*[python input] +all_chars = "cbBhHiIlLdCEFfuzZqQPXOv?g" +print(f' assert(sizeof(formattable.simple_type_chars) == {len(all_chars)+1});') +print(f' int i = 0;') +for char in all_chars: + ident_char = {'?': 'bool'}.get(char, char) + print(f" if (formattable.fmt_{ident_char}.code) " + + f"formattable.simple_type_chars[i++] = '{char}';") +print(f" formattable.simple_type_chars[i] = 0;") +[python start generated code]*/ + assert(sizeof(formattable.simple_type_chars) == 26); + int i = 0; + if (formattable.fmt_c.code) formattable.simple_type_chars[i++] = 'c'; + if (formattable.fmt_b.code) formattable.simple_type_chars[i++] = 'b'; + if (formattable.fmt_B.code) formattable.simple_type_chars[i++] = 'B'; + if (formattable.fmt_h.code) formattable.simple_type_chars[i++] = 'h'; + if (formattable.fmt_H.code) formattable.simple_type_chars[i++] = 'H'; + if (formattable.fmt_i.code) formattable.simple_type_chars[i++] = 'i'; + if (formattable.fmt_I.code) formattable.simple_type_chars[i++] = 'I'; + if (formattable.fmt_l.code) formattable.simple_type_chars[i++] = 'l'; + if (formattable.fmt_L.code) formattable.simple_type_chars[i++] = 'L'; + if (formattable.fmt_d.code) formattable.simple_type_chars[i++] = 'd'; + if (formattable.fmt_C.code) formattable.simple_type_chars[i++] = 'C'; + if (formattable.fmt_E.code) formattable.simple_type_chars[i++] = 'E'; + 
if (formattable.fmt_F.code) formattable.simple_type_chars[i++] = 'F'; + if (formattable.fmt_f.code) formattable.simple_type_chars[i++] = 'f'; + if (formattable.fmt_u.code) formattable.simple_type_chars[i++] = 'u'; + if (formattable.fmt_z.code) formattable.simple_type_chars[i++] = 'z'; + if (formattable.fmt_Z.code) formattable.simple_type_chars[i++] = 'Z'; + if (formattable.fmt_q.code) formattable.simple_type_chars[i++] = 'q'; + if (formattable.fmt_Q.code) formattable.simple_type_chars[i++] = 'Q'; + if (formattable.fmt_P.code) formattable.simple_type_chars[i++] = 'P'; + if (formattable.fmt_X.code) formattable.simple_type_chars[i++] = 'X'; + if (formattable.fmt_O.code) formattable.simple_type_chars[i++] = 'O'; + if (formattable.fmt_v.code) formattable.simple_type_chars[i++] = 'v'; + if (formattable.fmt_bool.code) formattable.simple_type_chars[i++] = '?'; + if (formattable.fmt_g.code) formattable.simple_type_chars[i++] = 'g'; + formattable.simple_type_chars[i] = 0; +/*[python end generated code: output=e6e5098a02f4b606 input=72031a625eac00c1]*/ + } #undef FIXINT_FIELDDESC_FOR _Py_COMP_DIAG_POP -struct fielddesc * -_ctypes_get_fielddesc(const char *fmt) +static void +_ctypes_init_fielddesc(void) { static bool initialized = false; static PyMutex mutex = {0}; PyMutex_Lock(&mutex); if (!initialized) { - _ctypes_init_fielddesc(); + _ctypes_init_fielddesc_locked(); initialized = true; } PyMutex_Unlock(&mutex); +} + +char * +_ctypes_get_simple_type_chars(void) { + _ctypes_init_fielddesc(); + return formattable.simple_type_chars; +} + +struct fielddesc * +_ctypes_get_fielddesc(const char *fmt) +{ + _ctypes_init_fielddesc(); + struct fielddesc *result = NULL; switch(fmt[0]) { /*[python input] diff --git a/Modules/_ctypes/clinic/_ctypes.c.h b/Modules/_ctypes/clinic/_ctypes.c.h index 405a3c9238d77d..1f2e871137ed79 100644 --- a/Modules/_ctypes/clinic/_ctypes.c.h +++ b/Modules/_ctypes/clinic/_ctypes.c.h @@ -331,7 +331,7 @@ PyCPointerType_set_type_impl(PyTypeObject *self, PyTypeObject *cls, PyObject *type); static PyObject * -PyCPointerType_set_type(PyTypeObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +PyCPointerType_set_type(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -356,7 +356,7 @@ PyCPointerType_set_type(PyTypeObject *self, PyTypeObject *cls, PyObject *const * goto exit; } type = args[0]; - return_value = PyCPointerType_set_type_impl(self, cls, type); + return_value = PyCPointerType_set_type_impl((PyTypeObject *)self, cls, type); exit: return return_value; @@ -616,12 +616,12 @@ static int _ctypes_CFuncPtr_errcheck_set_impl(PyCFuncPtrObject *self, PyObject *value); static int -_ctypes_CFuncPtr_errcheck_set(PyCFuncPtrObject *self, PyObject *value, void *Py_UNUSED(context)) +_ctypes_CFuncPtr_errcheck_set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) { int return_value; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ctypes_CFuncPtr_errcheck_set_impl(self, value); + return_value = _ctypes_CFuncPtr_errcheck_set_impl((PyCFuncPtrObject *)self, value); Py_END_CRITICAL_SECTION(); return return_value; @@ -648,12 +648,12 @@ static PyObject * _ctypes_CFuncPtr_errcheck_get_impl(PyCFuncPtrObject *self); static PyObject * -_ctypes_CFuncPtr_errcheck_get(PyCFuncPtrObject *self, void *Py_UNUSED(context)) +_ctypes_CFuncPtr_errcheck_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; 
Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ctypes_CFuncPtr_errcheck_get_impl(self); + return_value = _ctypes_CFuncPtr_errcheck_get_impl((PyCFuncPtrObject *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -673,12 +673,12 @@ static int _ctypes_CFuncPtr_restype_set_impl(PyCFuncPtrObject *self, PyObject *value); static int -_ctypes_CFuncPtr_restype_set(PyCFuncPtrObject *self, PyObject *value, void *Py_UNUSED(context)) +_ctypes_CFuncPtr_restype_set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) { int return_value; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ctypes_CFuncPtr_restype_set_impl(self, value); + return_value = _ctypes_CFuncPtr_restype_set_impl((PyCFuncPtrObject *)self, value); Py_END_CRITICAL_SECTION(); return return_value; @@ -705,12 +705,12 @@ static PyObject * _ctypes_CFuncPtr_restype_get_impl(PyCFuncPtrObject *self); static PyObject * -_ctypes_CFuncPtr_restype_get(PyCFuncPtrObject *self, void *Py_UNUSED(context)) +_ctypes_CFuncPtr_restype_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ctypes_CFuncPtr_restype_get_impl(self); + return_value = _ctypes_CFuncPtr_restype_get_impl((PyCFuncPtrObject *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -730,12 +730,12 @@ static int _ctypes_CFuncPtr_argtypes_set_impl(PyCFuncPtrObject *self, PyObject *value); static int -_ctypes_CFuncPtr_argtypes_set(PyCFuncPtrObject *self, PyObject *value, void *Py_UNUSED(context)) +_ctypes_CFuncPtr_argtypes_set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) { int return_value; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ctypes_CFuncPtr_argtypes_set_impl(self, value); + return_value = _ctypes_CFuncPtr_argtypes_set_impl((PyCFuncPtrObject *)self, value); Py_END_CRITICAL_SECTION(); return return_value; @@ -762,12 +762,12 @@ static PyObject * _ctypes_CFuncPtr_argtypes_get_impl(PyCFuncPtrObject *self); static PyObject * -_ctypes_CFuncPtr_argtypes_get(PyCFuncPtrObject *self, void *Py_UNUSED(context)) +_ctypes_CFuncPtr_argtypes_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ctypes_CFuncPtr_argtypes_get_impl(self); + return_value = _ctypes_CFuncPtr_argtypes_get_impl((PyCFuncPtrObject *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -793,4 +793,4 @@ Simple_from_outparm(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py } return Simple_from_outparm_impl(self, cls); } -/*[clinic end generated code: output=cb3583522a2c5ce5 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=a18d87239b6fb8ca input=a9049054013a1b77]*/ diff --git a/Modules/_ctypes/ctypes.h b/Modules/_ctypes/ctypes.h index 45e00a538fb5a5..07049d0968c790 100644 --- a/Modules/_ctypes/ctypes.h +++ b/Modules/_ctypes/ctypes.h @@ -5,8 +5,21 @@ #include "pycore_moduleobject.h" // _PyModule_GetState() #include "pycore_typeobject.h" // _PyType_GetModuleState() +// Do we support C99 complex types in ffi? +// For Apple's libffi, this must be determined at runtime (see gh-128156). 
#if defined(Py_HAVE_C_COMPLEX) && defined(Py_FFI_SUPPORT_C_COMPLEX) # include "../_complex.h" // complex +# if USING_APPLE_OS_LIBFFI && defined(__has_builtin) +# if __has_builtin(__builtin_available) +# define Py_FFI_COMPLEX_AVAILABLE __builtin_available(macOS 10.15, *) +# else +# define Py_FFI_COMPLEX_AVAILABLE 1 +# endif +# else +# define Py_FFI_COMPLEX_AVAILABLE 1 +# endif +#else +# define Py_FFI_COMPLEX_AVAILABLE 0 #endif #ifndef MS_WIN32 @@ -112,7 +125,10 @@ extern PyType_Spec cfield_spec; extern PyType_Spec cthunk_spec; typedef struct tagPyCArgObject PyCArgObject; +#define _PyCArgObject_CAST(op) ((PyCArgObject *)(op)) + typedef struct tagCDataObject CDataObject; +#define _CDataObject_CAST(op) ((CDataObject *)(op)) // GETFUNC: convert the C value at *ptr* to Python object, return the object // SETFUNC: write content of the PyObject *value* to the location at *ptr*; @@ -176,6 +192,8 @@ typedef struct { ffi_type *ffi_restype; ffi_type *atypes[1]; } CThunkObject; + +#define _CThunkObject_CAST(op) ((CThunkObject *)(op)) #define CThunk_CheckExact(st, v) Py_IS_TYPE(v, st->PyCThunk_Type) typedef struct { @@ -209,6 +227,8 @@ typedef struct { PyObject *paramflags; } PyCFuncPtrObject; +#define _PyCFuncPtrObject_CAST(op) ((PyCFuncPtrObject *)(op)) + extern int PyCStructUnionType_update_stginfo(PyObject *fields, PyObject *type, int isStruct); extern int PyType_stginfo(PyTypeObject *self, Py_ssize_t *psize, Py_ssize_t *palign, Py_ssize_t *plength); extern int PyObject_stginfo(PyObject *self, Py_ssize_t *psize, Py_ssize_t *palign, Py_ssize_t *plength); @@ -255,6 +275,9 @@ struct fielddesc { GETFUNC getfunc_swapped; }; +// Get all single-character type codes (for use in error messages) +extern char *_ctypes_get_simple_type_chars(void); + typedef struct CFieldObject { PyObject_HEAD Py_ssize_t offset; @@ -269,6 +292,8 @@ typedef struct CFieldObject { PyObject *name; /* exact PyUnicode */ } CFieldObject; +#define _CFieldObject_CAST(op) ((CFieldObject *)(op)) + /**************************************************************** StgInfo @@ -405,6 +430,8 @@ struct tagPyCArgObject { Py_ssize_t size; /* for the 'V' tag */ }; +#define _PyCArgObject_CAST(op) ((PyCArgObject *)(op)) + #define PyCArg_CheckExact(st, v) Py_IS_TYPE(v, st->PyCArg_Type) extern PyCArgObject *PyCArgObject_new(ctypes_state *st); @@ -543,3 +570,51 @@ PyStgInfo_Init(ctypes_state *state, PyTypeObject *type) info->initialized = 1; return info; } + +/* See discussion in gh-128490. The plan here is to eventually use a per-object + * lock rather than a critical section, but that work is for later. */ +#ifdef Py_GIL_DISABLED +# define LOCK_PTR(self) Py_BEGIN_CRITICAL_SECTION(self) +# define UNLOCK_PTR(self) Py_END_CRITICAL_SECTION() +#else +/* + * Dummy functions instead of macros so that 'self' can be + * unused in the caller without triggering a compiler warning. 
+ */ +static inline void LOCK_PTR(CDataObject *Py_UNUSED(self)) {} +static inline void UNLOCK_PTR(CDataObject *Py_UNUSED(self)) {} +#endif + +static inline void +locked_memcpy_to(CDataObject *self, void *buf, Py_ssize_t size) +{ + LOCK_PTR(self); + (void)memcpy(self->b_ptr, buf, size); + UNLOCK_PTR(self); +} + +static inline void +locked_memcpy_from(void *buf, CDataObject *self, Py_ssize_t size) +{ + LOCK_PTR(self); + (void)memcpy(buf, self->b_ptr, size); + UNLOCK_PTR(self); +} + +static inline void * +locked_deref(CDataObject *self) +{ + void *ptr; + LOCK_PTR(self); + ptr = *(void **)self->b_ptr; + UNLOCK_PTR(self); + return ptr; +} + +static inline void +locked_deref_assign(CDataObject *self, void *new_ptr) +{ + LOCK_PTR(self); + *(void **)self->b_ptr = new_ptr; + UNLOCK_PTR(self); +} diff --git a/Modules/_ctypes/stgdict.c b/Modules/_ctypes/stgdict.c index 5ca5b62427600d..d63a46a3bc23d2 100644 --- a/Modules/_ctypes/stgdict.c +++ b/Modules/_ctypes/stgdict.c @@ -257,7 +257,7 @@ PyCStructUnionType_update_stginfo(PyObject *type, PyObject *fields, int isStruct goto error; } - PyObject *layout_func = _PyImport_GetModuleAttrString("ctypes._layout", + PyObject *layout_func = PyImport_ImportModuleAttrString("ctypes._layout", "get_layout"); if (!layout_func) { goto error; diff --git a/Modules/_cursesmodule.c b/Modules/_cursesmodule.c index c6835738348ff9..7213a5be07de4b 100644 --- a/Modules/_cursesmodule.c +++ b/Modules/_cursesmodule.c @@ -226,7 +226,7 @@ _PyCursesCheckFunction(int called, const char *funcname) if (called == TRUE) { return 1; } - PyObject *exc = _PyImport_GetModuleAttrString("_curses", "error"); + PyObject *exc = PyImport_ImportModuleAttrString("_curses", "error"); if (exc != NULL) { PyErr_Format(exc, "must call %s() first", funcname); Py_DECREF(exc); diff --git a/Modules/_datetimemodule.c b/Modules/_datetimemodule.c index 368d10411366c4..bcbf4217d41a9b 100644 --- a/Modules/_datetimemodule.c +++ b/Modules/_datetimemodule.c @@ -226,7 +226,7 @@ clear_current_module(PyInterpreterState *interp, PyObject *expected) goto finally; error: - PyErr_WriteUnraisable(NULL); + PyErr_FormatUnraisable("Exception ignored while clearing _datetime module"); finally: PyErr_SetRaisedException(exc); @@ -1839,7 +1839,7 @@ wrap_strftime(PyObject *object, PyObject *format, PyObject *timetuple, assert(object && format && timetuple); assert(PyUnicode_Check(format)); - PyObject *strftime = _PyImport_GetModuleAttrString("time", "strftime"); + PyObject *strftime = PyImport_ImportModuleAttrString("time", "strftime"); if (strftime == NULL) { return NULL; } @@ -1849,9 +1849,10 @@ wrap_strftime(PyObject *object, PyObject *format, PyObject *timetuple, * is expensive, don't unless they're actually used. 
*/ - _PyUnicodeWriter writer; - _PyUnicodeWriter_Init(&writer); - writer.overallocate = 1; + PyUnicodeWriter *writer = PyUnicodeWriter_Create(0); + if (writer == NULL) { + goto Error; + } Py_ssize_t flen = PyUnicode_GET_LENGTH(format); Py_ssize_t i = 0; @@ -1955,11 +1956,11 @@ wrap_strftime(PyObject *object, PyObject *format, PyObject *timetuple, if (ch == 'C') { n -= 2; } - if (_PyUnicodeWriter_WriteSubstring(&writer, format, start, end) < 0) { + if (PyUnicodeWriter_WriteSubstring(writer, format, start, end) < 0) { goto Error; } start = i; - if (_PyUnicodeWriter_WriteASCIIString(&writer, buf, n) < 0) { + if (PyUnicodeWriter_WriteUTF8(writer, buf, n) < 0) { goto Error; } continue; @@ -1971,25 +1972,25 @@ wrap_strftime(PyObject *object, PyObject *format, PyObject *timetuple, } assert(replacement != NULL); assert(PyUnicode_Check(replacement)); - if (_PyUnicodeWriter_WriteSubstring(&writer, format, start, end) < 0) { + if (PyUnicodeWriter_WriteSubstring(writer, format, start, end) < 0) { goto Error; } start = i; - if (_PyUnicodeWriter_WriteStr(&writer, replacement) < 0) { + if (PyUnicodeWriter_WriteStr(writer, replacement) < 0) { goto Error; } } /* end while() */ PyObject *newformat; if (start == 0) { - _PyUnicodeWriter_Dealloc(&writer); + PyUnicodeWriter_Discard(writer); newformat = Py_NewRef(format); } else { - if (_PyUnicodeWriter_WriteSubstring(&writer, format, start, flen) < 0) { + if (PyUnicodeWriter_WriteSubstring(writer, format, start, flen) < 0) { goto Error; } - newformat = _PyUnicodeWriter_Finish(&writer); + newformat = PyUnicodeWriter_Finish(writer); if (newformat == NULL) { goto Done; } @@ -2007,7 +2008,7 @@ wrap_strftime(PyObject *object, PyObject *format, PyObject *timetuple, return result; Error: - _PyUnicodeWriter_Dealloc(&writer); + PyUnicodeWriter_Discard(writer); goto Done; } @@ -2021,7 +2022,7 @@ static PyObject * time_time(void) { PyObject *result = NULL; - PyObject *time = _PyImport_GetModuleAttrString("time", "time"); + PyObject *time = PyImport_ImportModuleAttrString("time", "time"); if (time != NULL) { result = PyObject_CallNoArgs(time); @@ -2039,7 +2040,7 @@ build_struct_time(int y, int m, int d, int hh, int mm, int ss, int dstflag) PyObject *struct_time; PyObject *result; - struct_time = _PyImport_GetModuleAttrString("time", "struct_time"); + struct_time = PyImport_ImportModuleAttrString("time", "struct_time"); if (struct_time == NULL) { return NULL; } @@ -4947,7 +4948,7 @@ datetime.time.replace minute: int(c_default="TIME_GET_MINUTE(self)") = unchanged second: int(c_default="TIME_GET_SECOND(self)") = unchanged microsecond: int(c_default="TIME_GET_MICROSECOND(self)") = unchanged - tzinfo: object(c_default="HASTZINFO(self) ? self->tzinfo : Py_None") = unchanged + tzinfo: object(c_default="HASTZINFO(self) ? 
((PyDateTime_Time *)self)->tzinfo : Py_None") = unchanged * fold: int(c_default="TIME_GET_FOLD(self)") = unchanged @@ -4958,7 +4959,7 @@ static PyObject * datetime_time_replace_impl(PyDateTime_Time *self, int hour, int minute, int second, int microsecond, PyObject *tzinfo, int fold) -/*[clinic end generated code: output=0b89a44c299e4f80 input=9b6a35b1e704b0ca]*/ +/*[clinic end generated code: output=0b89a44c299e4f80 input=abf23656e8df4e97]*/ { return new_time_subclass_fold_ex(hour, minute, second, microsecond, tzinfo, fold, (PyObject *)Py_TYPE(self)); @@ -6449,7 +6450,7 @@ datetime.datetime.replace minute: int(c_default="DATE_GET_MINUTE(self)") = unchanged second: int(c_default="DATE_GET_SECOND(self)") = unchanged microsecond: int(c_default="DATE_GET_MICROSECOND(self)") = unchanged - tzinfo: object(c_default="HASTZINFO(self) ? self->tzinfo : Py_None") = unchanged + tzinfo: object(c_default="HASTZINFO(self) ? ((PyDateTime_DateTime *)self)->tzinfo : Py_None") = unchanged * fold: int(c_default="DATE_GET_FOLD(self)") = unchanged @@ -6461,7 +6462,7 @@ datetime_datetime_replace_impl(PyDateTime_DateTime *self, int year, int month, int day, int hour, int minute, int second, int microsecond, PyObject *tzinfo, int fold) -/*[clinic end generated code: output=00bc96536833fddb input=9b38253d56d9bcad]*/ +/*[clinic end generated code: output=00bc96536833fddb input=fd972762d604d3e7]*/ { return new_datetime_subclass_fold_ex(year, month, day, hour, minute, second, microsecond, tzinfo, fold, diff --git a/Modules/_decimal/_decimal.c b/Modules/_decimal/_decimal.c index 0def463c7d8b9e..3dcb3e9870c8a4 100644 --- a/Modules/_decimal/_decimal.c +++ b/Modules/_decimal/_decimal.c @@ -178,11 +178,15 @@ typedef struct { mpd_uint_t data[_Py_DEC_MINALLOC]; } PyDecObject; +#define _PyDecObject_CAST(op) ((PyDecObject *)(op)) + typedef struct { PyObject_HEAD uint32_t *flags; } PyDecSignalDictObject; +#define _PyDecSignalDictObject_CAST(op) ((PyDecSignalDictObject *)(op)) + typedef struct PyDecContextObject { PyObject_HEAD mpd_context_t ctx; @@ -193,23 +197,27 @@ typedef struct PyDecContextObject { decimal_state *modstate; } PyDecContextObject; +#define _PyDecContextObject_CAST(op) ((PyDecContextObject *)(op)) + typedef struct { PyObject_HEAD PyObject *local; PyObject *global; } PyDecContextManagerObject; +#define _PyDecContextManagerObject_CAST(op) ((PyDecContextManagerObject *)(op)) + #undef MPD #undef CTX #define PyDec_CheckExact(st, v) Py_IS_TYPE(v, (st)->PyDec_Type) #define PyDec_Check(st, v) PyObject_TypeCheck(v, (st)->PyDec_Type) #define PyDecSignalDict_Check(st, v) Py_IS_TYPE(v, (st)->PyDecSignalDict_Type) #define PyDecContext_Check(st, v) PyObject_TypeCheck(v, (st)->PyDecContext_Type) -#define MPD(v) (&((PyDecObject *)v)->dec) -#define SdFlagAddr(v) (((PyDecSignalDictObject *)v)->flags) -#define SdFlags(v) (*((PyDecSignalDictObject *)v)->flags) -#define CTX(v) (&((PyDecContextObject *)v)->ctx) -#define CtxCaps(v) (((PyDecContextObject *)v)->capitals) +#define MPD(v) (&_PyDecObject_CAST(v)->dec) +#define SdFlagAddr(v) (_PyDecSignalDictObject_CAST(v)->flags) +#define SdFlags(v) (*_PyDecSignalDictObject_CAST(v)->flags) +#define CTX(v) (&_PyDecContextObject_CAST(v)->ctx) +#define CtxCaps(v) (_PyDecContextObject_CAST(v)->capitals) static inline decimal_state * get_module_state_from_ctx(PyObject *v) @@ -1413,8 +1421,9 @@ context_new(PyTypeObject *type, } static int -context_traverse(PyDecContextObject *self, visitproc visit, void *arg) +context_traverse(PyObject *op, visitproc visit, void *arg) { + PyDecContextObject 
*self = _PyDecContextObject_CAST(op); Py_VISIT(Py_TYPE(self)); Py_VISIT(self->traps); Py_VISIT(self->flags); @@ -1422,15 +1431,16 @@ context_traverse(PyDecContextObject *self, visitproc visit, void *arg) } static int -context_clear(PyDecContextObject *self) +context_clear(PyObject *op) { + PyDecContextObject *self = _PyDecContextObject_CAST(op); Py_CLEAR(self->traps); Py_CLEAR(self->flags); return 0; } static void -context_dealloc(PyDecContextObject *self) +context_dealloc(PyObject *self) { PyTypeObject *tp = Py_TYPE(self); PyObject_GC_UnTrack(self); @@ -1473,7 +1483,7 @@ context_init(PyObject *self, PyObject *args, PyObject *kwds) } static PyObject * -context_repr(PyDecContextObject *self) +context_repr(PyObject *self) { mpd_context_t *ctx; char flags[MPD_MAX_SIGNAL_LIST]; @@ -1481,7 +1491,7 @@ context_repr(PyDecContextObject *self) int n, mem; #ifdef Py_DEBUG - decimal_state *state = get_module_state_from_ctx((PyObject *)self); + decimal_state *state = get_module_state_from_ctx(self); assert(PyDecContext_Check(state, self)); #endif ctx = CTX(self); @@ -1501,7 +1511,7 @@ context_repr(PyDecContextObject *self) "Context(prec=%zd, rounding=%s, Emin=%zd, Emax=%zd, " "capitals=%d, clamp=%d, flags=%s, traps=%s)", ctx->prec, mpd_round_string[ctx->round], ctx->emin, ctx->emax, - self->capitals, ctx->clamp, flags, traps); + CtxCaps(self), ctx->clamp, flags, traps); } static void @@ -1621,16 +1631,16 @@ context_reduce(PyObject *self, PyObject *Py_UNUSED(dummy)) static PyGetSetDef context_getsets [] = { - { "prec", (getter)context_getprec, (setter)context_setprec, NULL, NULL}, - { "Emax", (getter)context_getemax, (setter)context_setemax, NULL, NULL}, - { "Emin", (getter)context_getemin, (setter)context_setemin, NULL, NULL}, - { "rounding", (getter)context_getround, (setter)context_setround, NULL, NULL}, - { "capitals", (getter)context_getcapitals, (setter)context_setcapitals, NULL, NULL}, - { "clamp", (getter)context_getclamp, (setter)context_setclamp, NULL, NULL}, + { "prec", context_getprec, context_setprec, NULL, NULL}, + { "Emax", context_getemax, context_setemax, NULL, NULL}, + { "Emin", context_getemin, context_setemin, NULL, NULL}, + { "rounding", context_getround, context_setround, NULL, NULL}, + { "capitals", context_getcapitals, context_setcapitals, NULL, NULL}, + { "clamp", context_getclamp, context_setclamp, NULL, NULL}, #ifdef EXTRA_FUNCTIONALITY - { "_allcr", (getter)context_getallcr, (setter)context_setallcr, NULL, NULL}, - { "_traps", (getter)context_gettraps, (setter)context_settraps, NULL, NULL}, - { "_flags", (getter)context_getstatus, (setter)context_setstatus, NULL, NULL}, + { "_allcr", context_getallcr, context_setallcr, NULL, NULL}, + { "_traps", context_gettraps, context_settraps, NULL, NULL}, + { "_flags", context_getstatus, context_setstatus, NULL, NULL}, #endif {NULL} }; @@ -1946,9 +1956,9 @@ ctxmanager_new(PyObject *m, PyObject *args, PyObject *kwds) } static int -ctxmanager_traverse(PyDecContextManagerObject *self, visitproc visit, - void *arg) +ctxmanager_traverse(PyObject *op, visitproc visit, void *arg) { + PyDecContextManagerObject *self = _PyDecContextManagerObject_CAST(op); Py_VISIT(Py_TYPE(self)); Py_VISIT(self->local); Py_VISIT(self->global); @@ -1956,29 +1966,29 @@ ctxmanager_traverse(PyDecContextManagerObject *self, visitproc visit, } static int -ctxmanager_clear(PyDecContextManagerObject *self) +ctxmanager_clear(PyObject *op) { + PyDecContextManagerObject *self = _PyDecContextManagerObject_CAST(op); Py_CLEAR(self->local); Py_CLEAR(self->global); return 0; } 
static void -ctxmanager_dealloc(PyDecContextManagerObject *self) +ctxmanager_dealloc(PyObject *self) { PyTypeObject *tp = Py_TYPE(self); PyObject_GC_UnTrack(self); (void)ctxmanager_clear(self); - tp->tp_free((PyObject *)self); + tp->tp_free(self); Py_DECREF(tp); } static PyObject * -ctxmanager_set_local(PyDecContextManagerObject *self, - PyObject *Py_UNUSED(dummy)) +ctxmanager_set_local(PyObject *op, PyObject *Py_UNUSED(dummy)) { PyObject *ret; - + PyDecContextManagerObject *self = _PyDecContextManagerObject_CAST(op); ret = PyDec_SetCurrentContext(PyType_GetModule(Py_TYPE(self)), self->local); if (ret == NULL) { return NULL; @@ -1989,11 +1999,10 @@ ctxmanager_set_local(PyDecContextManagerObject *self, } static PyObject * -ctxmanager_restore_global(PyDecContextManagerObject *self, - PyObject *Py_UNUSED(args)) +ctxmanager_restore_global(PyObject *op, PyObject *Py_UNUSED(args)) { PyObject *ret; - + PyDecContextManagerObject *self = _PyDecContextManagerObject_CAST(op); ret = PyDec_SetCurrentContext(PyType_GetModule(Py_TYPE(self)), self->global); if (ret == NULL) { return NULL; @@ -2005,8 +2014,8 @@ ctxmanager_restore_global(PyDecContextManagerObject *self, static PyMethodDef ctxmanager_methods[] = { - {"__enter__", (PyCFunction)ctxmanager_set_local, METH_NOARGS, NULL}, - {"__exit__", (PyCFunction)ctxmanager_restore_global, METH_VARARGS, NULL}, + {"__enter__", ctxmanager_set_local, METH_NOARGS, NULL}, + {"__exit__", ctxmanager_restore_global, METH_VARARGS, NULL}, {NULL, NULL} }; @@ -2336,15 +2345,16 @@ dec_from_long(decimal_state *state, PyTypeObject *type, PyObject *v, } if (export_long.digits) { const PyLongLayout *layout = PyLong_GetNativeLayout(); - uint32_t base = (uint32_t)1 << layout->bits_per_digit; - uint8_t sign = export_long.negative ? MPD_NEG : MPD_POS; - Py_ssize_t len = export_long.ndigits; - assert(layout->bits_per_digit <= 32); + assert(layout->bits_per_digit < 32); assert(layout->digits_order == -1); assert(layout->digit_endianness == (PY_LITTLE_ENDIAN ? -1 : 1)); assert(layout->digit_size == 2 || layout->digit_size == 4); + uint32_t base = (uint32_t)1 << layout->bits_per_digit; + uint8_t sign = export_long.negative ? 
MPD_NEG : MPD_POS; + Py_ssize_t len = export_long.ndigits; + if (layout->digit_size == 4) { mpd_qimport_u32(MPD(dec), export_long.digits, len, sign, base, ctx, status); @@ -3464,7 +3474,7 @@ pydec_format(PyObject *dec, PyObject *context, PyObject *fmt, decimal_state *sta PyObject *u; if (state->PyDecimal == NULL) { - state->PyDecimal = _PyImport_GetModuleAttrString("_pydecimal", "Decimal"); + state->PyDecimal = PyImport_ImportModuleAttrString("_pydecimal", "Decimal"); if (state->PyDecimal == NULL) { return NULL; } @@ -3642,13 +3652,6 @@ dec_format(PyObject *dec, PyObject *args) static PyObject * dec_as_long(PyObject *dec, PyObject *context, int round) { - PyLongObject *pylong; - digit *ob_digit; - size_t n; - mpd_t *x; - mpd_context_t workctx; - uint32_t status = 0; - if (mpd_isspecial(MPD(dec))) { if (mpd_isnan(MPD(dec))) { PyErr_SetString(PyExc_ValueError, @@ -3661,12 +3664,16 @@ dec_as_long(PyObject *dec, PyObject *context, int round) return NULL; } - x = mpd_qnew(); + mpd_t *x = mpd_qnew(); + if (x == NULL) { PyErr_NoMemory(); return NULL; } - workctx = *CTX(context); + + mpd_context_t workctx = *CTX(context); + uint32_t status = 0; + workctx.round = round; mpd_qround_to_int(x, MPD(dec), &workctx, &status); if (dec_addstatus(context, status)) { @@ -3675,34 +3682,56 @@ dec_as_long(PyObject *dec, PyObject *context, int round) } status = 0; - ob_digit = NULL; -#if PYLONG_BITS_IN_DIGIT == 30 - n = mpd_qexport_u32(&ob_digit, 0, PyLong_BASE, x, &status); -#elif PYLONG_BITS_IN_DIGIT == 15 - n = mpd_qexport_u16(&ob_digit, 0, PyLong_BASE, x, &status); -#else - #error "PYLONG_BITS_IN_DIGIT should be 15 or 30" -#endif + int64_t val = mpd_qget_i64(x, &status); + + if (!status) { + mpd_del(x); + return PyLong_FromInt64(val); + } + assert(!mpd_iszero(x)); + + const PyLongLayout *layout = PyLong_GetNativeLayout(); + + assert(layout->bits_per_digit < 32); + assert(layout->digits_order == -1); + assert(layout->digit_endianness == (PY_LITTLE_ENDIAN ? -1 : 1)); + assert(layout->digit_size == 2 || layout->digit_size == 4); + + uint32_t base = (uint32_t)1 << layout->bits_per_digit; + /* We use a temporary buffer for digits for now, as for nonzero rdata + mpd_qexport_u32/u16() require either space "allocated by one of + libmpdec’s allocation functions" or "rlen MUST be correct" (to avoid + reallocation). This can be further optimized by using rlen from + mpd_sizeinbase(). See gh-127925. */ + void *tmp_digits = NULL; + size_t n; + + status = 0; + if (layout->digit_size == 4) { + n = mpd_qexport_u32((uint32_t **)&tmp_digits, 0, base, x, &status); + } + else { + n = mpd_qexport_u16((uint16_t **)&tmp_digits, 0, base, x, &status); + } if (n == SIZE_MAX) { PyErr_NoMemory(); mpd_del(x); + mpd_free(tmp_digits); return NULL; } - if (n == 1) { - sdigit val = mpd_arith_sign(x) * ob_digit[0]; - mpd_free(ob_digit); - mpd_del(x); - return PyLong_FromLong(val); - } + void *digits; + PyLongWriter *writer = PyLongWriter_Create(mpd_isnegative(x), n, &digits); - assert(n > 0); - assert(!mpd_iszero(x)); - pylong = _PyLong_FromDigits(mpd_isnegative(x), n, ob_digit); - mpd_free(ob_digit); mpd_del(x); - return (PyObject *) pylong; + if (writer == NULL) { + mpd_free(tmp_digits); + return NULL; + } + memcpy(digits, tmp_digits, layout->digit_size*n); + mpd_free(tmp_digits); + return PyLongWriter_Finish(writer); } /* Convert a Decimal to its exact integer ratio representation. 
*/ @@ -5021,8 +5050,8 @@ dec_imag(PyObject *self, void *Py_UNUSED(closure)) static PyGetSetDef dec_getsets [] = { - { "real", (getter)dec_real, NULL, NULL, NULL}, - { "imag", (getter)dec_imag, NULL, NULL, NULL}, + { "real", dec_real, NULL, NULL, NULL}, + { "imag", dec_imag, NULL, NULL, NULL}, {NULL} }; diff --git a/Modules/_decimal/libmpdec/io.c b/Modules/_decimal/libmpdec/io.c index 4e95b8964c8e5d..bdcca001659bc0 100644 --- a/Modules/_decimal/libmpdec/io.c +++ b/Modules/_decimal/libmpdec/io.c @@ -347,6 +347,10 @@ mpd_qset_string_exact(mpd_t *dec, const char *s, uint32_t *status) or the location of a decimal point. */ #define EXTRACT_DIGIT(s, x, d, dot) \ if (s == dot) *s++ = '.'; *s++ = '0' + (char)(x / d); x %= d +#if defined(__GNUC__) && !defined(__INTEL_COMPILER) && __GNUC__ >= 12 + #pragma GCC diagnostic push + #pragma GCC diagnostic ignored "-Wstringop-overflow" +#endif static inline char * word_to_string(char *s, mpd_uint_t x, int n, char *dot) { @@ -378,6 +382,9 @@ word_to_string(char *s, mpd_uint_t x, int n, char *dot) *s = '\0'; return s; } +#if defined(__GNUC__) && !defined(__INTEL_COMPILER) && __GNUC__ >= 12 + #pragma GCC diagnostic pop +#endif /* Print exponent x to string s. Undefined for MPD_SSIZE_MIN. */ static inline char * diff --git a/Modules/_elementtree.c b/Modules/_elementtree.c index 355f322d304c2f..b5b0b82571f882 100644 --- a/Modules/_elementtree.c +++ b/Modules/_elementtree.c @@ -16,7 +16,6 @@ #endif #include "Python.h" -#include "pycore_import.h" // _PyImport_GetModuleAttrString() #include "pycore_pyhash.h" // _Py_HashSecret #include // offsetof() @@ -4393,7 +4392,7 @@ module_exec(PyObject *m) CREATE_TYPE(m, st->Element_Type, &element_spec); CREATE_TYPE(m, st->XMLParser_Type, &xmlparser_spec); - st->deepcopy_obj = _PyImport_GetModuleAttrString("copy", "deepcopy"); + st->deepcopy_obj = PyImport_ImportModuleAttrString("copy", "deepcopy"); if (st->deepcopy_obj == NULL) { goto error; } @@ -4403,7 +4402,7 @@ module_exec(PyObject *m) goto error; /* link against pyexpat */ - if (!(st->expat_capsule = _PyImport_GetModuleAttrString("pyexpat", "expat_CAPI"))) + if (!(st->expat_capsule = PyImport_ImportModuleAttrString("pyexpat", "expat_CAPI"))) goto error; if (!(st->expat_capi = PyCapsule_GetPointer(st->expat_capsule, PyExpat_CAPSULE_NAME))) goto error; diff --git a/Modules/_gdbmmodule.c b/Modules/_gdbmmodule.c index ea4fe247987e9d..ab2ebdba9249bf 100644 --- a/Modules/_gdbmmodule.c +++ b/Modules/_gdbmmodule.c @@ -76,6 +76,8 @@ typedef struct { GDBM_FILE di_dbm; } gdbmobject; +#define _gdbmobject_CAST(op) ((gdbmobject *)(op)) + #include "clinic/_gdbmmodule.c.h" #define check_gdbmobject_open(v, err) \ @@ -120,27 +122,29 @@ newgdbmobject(_gdbm_state *state, const char *file, int flags, int mode) /* Methods */ static int -gdbm_traverse(gdbmobject *dp, visitproc visit, void *arg) +gdbm_traverse(PyObject *op, visitproc visit, void *arg) { - Py_VISIT(Py_TYPE(dp)); + Py_VISIT(Py_TYPE(op)); return 0; } static void -gdbm_dealloc(gdbmobject *dp) +gdbm_dealloc(PyObject *op) { + gdbmobject *dp = _gdbmobject_CAST(op); + PyTypeObject *tp = Py_TYPE(dp); PyObject_GC_UnTrack(dp); if (dp->di_dbm) { gdbm_close(dp->di_dbm); } - PyTypeObject *tp = Py_TYPE(dp); tp->tp_free(dp); Py_DECREF(tp); } static Py_ssize_t -gdbm_length(gdbmobject *dp) +gdbm_length(PyObject *op) { + gdbmobject *dp = _gdbmobject_CAST(op); _gdbm_state *state = PyType_GetModuleState(Py_TYPE(dp)); if (dp->di_dbm == NULL) { PyErr_SetString(state->gdbm_error, "GDBM object has already been closed"); @@ -185,8 +189,9 @@ 
gdbm_length(gdbmobject *dp) } static int -gdbm_bool(gdbmobject *dp) +gdbm_bool(PyObject *op) { + gdbmobject *dp = _gdbmobject_CAST(op); _gdbm_state *state = PyType_GetModuleState(Py_TYPE(dp)); if (dp->di_dbm == NULL) { PyErr_SetString(state->gdbm_error, "GDBM object has already been closed"); @@ -235,10 +240,11 @@ parse_datum(PyObject *o, datum *d, const char *failmsg) } static PyObject * -gdbm_subscript(gdbmobject *dp, PyObject *key) +gdbm_subscript(PyObject *op, PyObject *key) { PyObject *v; datum drec, krec; + gdbmobject *dp = _gdbmobject_CAST(op); _gdbm_state *state = PyType_GetModuleState(Py_TYPE(dp)); if (!parse_datum(key, &krec, NULL)) { @@ -275,7 +281,7 @@ _gdbm_gdbm_get_impl(gdbmobject *self, PyObject *key, PyObject *default_value) { PyObject *res; - res = gdbm_subscript(self, key); + res = gdbm_subscript((PyObject *)self, key); if (res == NULL && PyErr_ExceptionMatches(PyExc_KeyError)) { PyErr_Clear(); return Py_NewRef(default_value); @@ -284,10 +290,11 @@ _gdbm_gdbm_get_impl(gdbmobject *self, PyObject *key, PyObject *default_value) } static int -gdbm_ass_sub(gdbmobject *dp, PyObject *v, PyObject *w) +gdbm_ass_sub(PyObject *op, PyObject *v, PyObject *w) { datum krec, drec; const char *failmsg = "gdbm mappings have bytes or string indices only"; + gdbmobject *dp = _gdbmobject_CAST(op); _gdbm_state *state = PyType_GetModuleState(Py_TYPE(dp)); if (!parse_datum(v, &krec, failmsg)) { @@ -345,12 +352,12 @@ _gdbm_gdbm_setdefault_impl(gdbmobject *self, PyObject *key, { PyObject *res; - res = gdbm_subscript(self, key); + res = gdbm_subscript((PyObject *)self, key); if (res == NULL && PyErr_ExceptionMatches(PyExc_KeyError)) { PyErr_Clear(); - if (gdbm_ass_sub(self, key, default_value) < 0) + if (gdbm_ass_sub((PyObject *)self, key, default_value) < 0) return NULL; - return gdbm_subscript(self, key); + return gdbm_subscript((PyObject *)self, key); } return res; } @@ -841,7 +848,7 @@ _gdbm_module_clear(PyObject *module) static void _gdbm_module_free(void *module) { - _gdbm_module_clear((PyObject *)module); + (void)_gdbm_module_clear((PyObject *)module); } static PyModuleDef_Slot _gdbm_module_slots[] = { diff --git a/Modules/_interpretersmodule.c b/Modules/_interpretersmodule.c index a36823c4bb982b..fcd0baf696f943 100644 --- a/Modules/_interpretersmodule.c +++ b/Modules/_interpretersmodule.c @@ -459,7 +459,12 @@ _run_in_interpreter(PyInterpreterState *interp, // Prep and switch interpreters. if (_PyXI_Enter(&session, interp, shareables) < 0) { - assert(!PyErr_Occurred()); + if (PyErr_Occurred()) { + // If an error occurred at this step, it means that interp + // was not prepared and switched. + return -1; + } + // Now, apply the error from another interpreter: PyObject *excinfo = _PyXI_ApplyError(session.error); if (excinfo != NULL) { *p_excinfo = excinfo; diff --git a/Modules/_io/bytesio.c b/Modules/_io/bytesio.c index fb66d3db0f7a1f..16095333db6638 100644 --- a/Modules/_io/bytesio.c +++ b/Modules/_io/bytesio.c @@ -588,7 +588,7 @@ _io_BytesIO_readinto_impl(bytesio *self, Py_buffer *buffer) /*[clinic input] _io.BytesIO.truncate - size: Py_ssize_t(accept={int, NoneType}, c_default="self->pos") = None + size: Py_ssize_t(accept={int, NoneType}, c_default="((bytesio *)self)->pos") = None / Truncate the file to at most size bytes. @@ -599,7 +599,7 @@ The current file position is unchanged. Returns the new size.
static PyObject * _io_BytesIO_truncate_impl(bytesio *self, Py_ssize_t size) -/*[clinic end generated code: output=9ad17650c15fa09b input=423759dd42d2f7c1]*/ +/*[clinic end generated code: output=9ad17650c15fa09b input=dae4295e11c1bbb4]*/ { CHECK_CLOSED(self); CHECK_EXPORTS(self); diff --git a/Modules/_io/clinic/bufferedio.c.h b/Modules/_io/clinic/bufferedio.c.h index e035bd99baca5f..8ab8000fafee02 100644 --- a/Modules/_io/clinic/bufferedio.c.h +++ b/Modules/_io/clinic/bufferedio.c.h @@ -288,12 +288,12 @@ static PyObject * _io__Buffered___sizeof___impl(buffered *self); static PyObject * -_io__Buffered___sizeof__(buffered *self, PyObject *Py_UNUSED(ignored)) +_io__Buffered___sizeof__(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io__Buffered___sizeof___impl(self); + return_value = _io__Buffered___sizeof___impl((buffered *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -319,12 +319,12 @@ static PyObject * _io__Buffered_simple_flush_impl(buffered *self); static PyObject * -_io__Buffered_simple_flush(buffered *self, PyObject *Py_UNUSED(ignored)) +_io__Buffered_simple_flush(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io__Buffered_simple_flush_impl(self); + return_value = _io__Buffered_simple_flush_impl((buffered *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -344,12 +344,12 @@ static PyObject * _io__Buffered_closed_get_impl(buffered *self); static PyObject * -_io__Buffered_closed_get(buffered *self, void *Py_UNUSED(context)) +_io__Buffered_closed_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io__Buffered_closed_get_impl(self); + return_value = _io__Buffered_closed_get_impl((buffered *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -367,12 +367,12 @@ static PyObject * _io__Buffered_close_impl(buffered *self); static PyObject * -_io__Buffered_close(buffered *self, PyObject *Py_UNUSED(ignored)) +_io__Buffered_close(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io__Buffered_close_impl(self); + return_value = _io__Buffered_close_impl((buffered *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -390,12 +390,12 @@ static PyObject * _io__Buffered_detach_impl(buffered *self); static PyObject * -_io__Buffered_detach(buffered *self, PyObject *Py_UNUSED(ignored)) +_io__Buffered_detach(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io__Buffered_detach_impl(self); + return_value = _io__Buffered_detach_impl((buffered *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -413,12 +413,12 @@ static PyObject * _io__Buffered_seekable_impl(buffered *self); static PyObject * -_io__Buffered_seekable(buffered *self, PyObject *Py_UNUSED(ignored)) +_io__Buffered_seekable(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io__Buffered_seekable_impl(self); + return_value = _io__Buffered_seekable_impl((buffered *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -436,12 +436,12 @@ static PyObject * _io__Buffered_readable_impl(buffered *self); static PyObject * -_io__Buffered_readable(buffered *self, PyObject *Py_UNUSED(ignored)) +_io__Buffered_readable(PyObject *self, PyObject 
*Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io__Buffered_readable_impl(self); + return_value = _io__Buffered_readable_impl((buffered *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -459,12 +459,12 @@ static PyObject * _io__Buffered_writable_impl(buffered *self); static PyObject * -_io__Buffered_writable(buffered *self, PyObject *Py_UNUSED(ignored)) +_io__Buffered_writable(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io__Buffered_writable_impl(self); + return_value = _io__Buffered_writable_impl((buffered *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -484,12 +484,12 @@ static PyObject * _io__Buffered_name_get_impl(buffered *self); static PyObject * -_io__Buffered_name_get(buffered *self, void *Py_UNUSED(context)) +_io__Buffered_name_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io__Buffered_name_get_impl(self); + return_value = _io__Buffered_name_get_impl((buffered *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -509,12 +509,12 @@ static PyObject * _io__Buffered_mode_get_impl(buffered *self); static PyObject * -_io__Buffered_mode_get(buffered *self, void *Py_UNUSED(context)) +_io__Buffered_mode_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io__Buffered_mode_get_impl(self); + return_value = _io__Buffered_mode_get_impl((buffered *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -532,12 +532,12 @@ static PyObject * _io__Buffered_fileno_impl(buffered *self); static PyObject * -_io__Buffered_fileno(buffered *self, PyObject *Py_UNUSED(ignored)) +_io__Buffered_fileno(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io__Buffered_fileno_impl(self); + return_value = _io__Buffered_fileno_impl((buffered *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -555,12 +555,12 @@ static PyObject * _io__Buffered_isatty_impl(buffered *self); static PyObject * -_io__Buffered_isatty(buffered *self, PyObject *Py_UNUSED(ignored)) +_io__Buffered_isatty(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io__Buffered_isatty_impl(self); + return_value = _io__Buffered_isatty_impl((buffered *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -578,12 +578,12 @@ static PyObject * _io__Buffered_flush_impl(buffered *self); static PyObject * -_io__Buffered_flush(buffered *self, PyObject *Py_UNUSED(ignored)) +_io__Buffered_flush(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io__Buffered_flush_impl(self); + return_value = _io__Buffered_flush_impl((buffered *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -601,7 +601,7 @@ static PyObject * _io__Buffered_peek_impl(buffered *self, Py_ssize_t size); static PyObject * -_io__Buffered_peek(buffered *self, PyObject *const *args, Py_ssize_t nargs) +_io__Buffered_peek(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; Py_ssize_t size = 0; @@ -626,7 +626,7 @@ _io__Buffered_peek(buffered *self, PyObject *const *args, Py_ssize_t nargs) } skip_optional: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io__Buffered_peek_impl(self, 
size); + return_value = _io__Buffered_peek_impl((buffered *)self, size); Py_END_CRITICAL_SECTION(); exit: @@ -645,7 +645,7 @@ static PyObject * _io__Buffered_read_impl(buffered *self, Py_ssize_t n); static PyObject * -_io__Buffered_read(buffered *self, PyObject *const *args, Py_ssize_t nargs) +_io__Buffered_read(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; Py_ssize_t n = -1; @@ -661,7 +661,7 @@ _io__Buffered_read(buffered *self, PyObject *const *args, Py_ssize_t nargs) } skip_optional: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io__Buffered_read_impl(self, n); + return_value = _io__Buffered_read_impl((buffered *)self, n); Py_END_CRITICAL_SECTION(); exit: @@ -680,7 +680,7 @@ static PyObject * _io__Buffered_read1_impl(buffered *self, Py_ssize_t n); static PyObject * -_io__Buffered_read1(buffered *self, PyObject *const *args, Py_ssize_t nargs) +_io__Buffered_read1(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; Py_ssize_t n = -1; @@ -705,7 +705,7 @@ _io__Buffered_read1(buffered *self, PyObject *const *args, Py_ssize_t nargs) } skip_optional: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io__Buffered_read1_impl(self, n); + return_value = _io__Buffered_read1_impl((buffered *)self, n); Py_END_CRITICAL_SECTION(); exit: @@ -724,7 +724,7 @@ static PyObject * _io__Buffered_readinto_impl(buffered *self, Py_buffer *buffer); static PyObject * -_io__Buffered_readinto(buffered *self, PyObject *arg) +_io__Buffered_readinto(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; Py_buffer buffer = {NULL, NULL}; @@ -734,7 +734,7 @@ _io__Buffered_readinto(buffered *self, PyObject *arg) goto exit; } Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io__Buffered_readinto_impl(self, &buffer); + return_value = _io__Buffered_readinto_impl((buffered *)self, &buffer); Py_END_CRITICAL_SECTION(); exit: @@ -758,7 +758,7 @@ static PyObject * _io__Buffered_readinto1_impl(buffered *self, Py_buffer *buffer); static PyObject * -_io__Buffered_readinto1(buffered *self, PyObject *arg) +_io__Buffered_readinto1(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; Py_buffer buffer = {NULL, NULL}; @@ -768,7 +768,7 @@ _io__Buffered_readinto1(buffered *self, PyObject *arg) goto exit; } Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io__Buffered_readinto1_impl(self, &buffer); + return_value = _io__Buffered_readinto1_impl((buffered *)self, &buffer); Py_END_CRITICAL_SECTION(); exit: @@ -792,7 +792,7 @@ static PyObject * _io__Buffered_readline_impl(buffered *self, Py_ssize_t size); static PyObject * -_io__Buffered_readline(buffered *self, PyObject *const *args, Py_ssize_t nargs) +_io__Buffered_readline(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; Py_ssize_t size = -1; @@ -808,7 +808,7 @@ _io__Buffered_readline(buffered *self, PyObject *const *args, Py_ssize_t nargs) } skip_optional: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io__Buffered_readline_impl(self, size); + return_value = _io__Buffered_readline_impl((buffered *)self, size); Py_END_CRITICAL_SECTION(); exit: @@ -827,12 +827,12 @@ static PyObject * _io__Buffered_tell_impl(buffered *self); static PyObject * -_io__Buffered_tell(buffered *self, PyObject *Py_UNUSED(ignored)) +_io__Buffered_tell(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io__Buffered_tell_impl(self); + return_value = _io__Buffered_tell_impl((buffered *)self); 
Py_END_CRITICAL_SECTION(); return return_value; @@ -850,7 +850,7 @@ static PyObject * _io__Buffered_seek_impl(buffered *self, PyObject *targetobj, int whence); static PyObject * -_io__Buffered_seek(buffered *self, PyObject *const *args, Py_ssize_t nargs) +_io__Buffered_seek(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *targetobj; @@ -869,7 +869,7 @@ _io__Buffered_seek(buffered *self, PyObject *const *args, Py_ssize_t nargs) } skip_optional: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io__Buffered_seek_impl(self, targetobj, whence); + return_value = _io__Buffered_seek_impl((buffered *)self, targetobj, whence); Py_END_CRITICAL_SECTION(); exit: @@ -888,7 +888,7 @@ static PyObject * _io__Buffered_truncate_impl(buffered *self, PyTypeObject *cls, PyObject *pos); static PyObject * -_io__Buffered_truncate(buffered *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_io__Buffered_truncate(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -918,7 +918,7 @@ _io__Buffered_truncate(buffered *self, PyTypeObject *cls, PyObject *const *args, pos = args[0]; skip_optional_posonly: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io__Buffered_truncate_impl(self, cls, pos); + return_value = _io__Buffered_truncate_impl((buffered *)self, cls, pos); Py_END_CRITICAL_SECTION(); exit: @@ -1089,7 +1089,7 @@ static PyObject * _io_BufferedWriter_write_impl(buffered *self, Py_buffer *buffer); static PyObject * -_io_BufferedWriter_write(buffered *self, PyObject *arg) +_io_BufferedWriter_write(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; Py_buffer buffer = {NULL, NULL}; @@ -1098,7 +1098,7 @@ _io_BufferedWriter_write(buffered *self, PyObject *arg) goto exit; } Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_BufferedWriter_write_impl(self, &buffer); + return_value = _io_BufferedWriter_write_impl((buffered *)self, &buffer); Py_END_CRITICAL_SECTION(); exit: @@ -1246,4 +1246,4 @@ _io_BufferedRandom___init__(PyObject *self, PyObject *args, PyObject *kwargs) exit: return return_value; } -/*[clinic end generated code: output=8f28a97987a9fbe1 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=f019d29701ba2556 input=a9049054013a1b77]*/ diff --git a/Modules/_io/clinic/bytesio.c.h b/Modules/_io/clinic/bytesio.c.h index 98d88698c5e9b7..5528df952c33fb 100644 --- a/Modules/_io/clinic/bytesio.c.h +++ b/Modules/_io/clinic/bytesio.c.h @@ -22,9 +22,9 @@ static PyObject * _io_BytesIO_readable_impl(bytesio *self); static PyObject * -_io_BytesIO_readable(bytesio *self, PyObject *Py_UNUSED(ignored)) +_io_BytesIO_readable(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _io_BytesIO_readable_impl(self); + return _io_BytesIO_readable_impl((bytesio *)self); } PyDoc_STRVAR(_io_BytesIO_writable__doc__, @@ -40,9 +40,9 @@ static PyObject * _io_BytesIO_writable_impl(bytesio *self); static PyObject * -_io_BytesIO_writable(bytesio *self, PyObject *Py_UNUSED(ignored)) +_io_BytesIO_writable(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _io_BytesIO_writable_impl(self); + return _io_BytesIO_writable_impl((bytesio *)self); } PyDoc_STRVAR(_io_BytesIO_seekable__doc__, @@ -58,9 +58,9 @@ static PyObject * _io_BytesIO_seekable_impl(bytesio *self); static PyObject * -_io_BytesIO_seekable(bytesio *self, PyObject *Py_UNUSED(ignored)) +_io_BytesIO_seekable(PyObject *self, 
PyObject *Py_UNUSED(ignored)) { - return _io_BytesIO_seekable_impl(self); + return _io_BytesIO_seekable_impl((bytesio *)self); } PyDoc_STRVAR(_io_BytesIO_flush__doc__, @@ -76,9 +76,9 @@ static PyObject * _io_BytesIO_flush_impl(bytesio *self); static PyObject * -_io_BytesIO_flush(bytesio *self, PyObject *Py_UNUSED(ignored)) +_io_BytesIO_flush(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _io_BytesIO_flush_impl(self); + return _io_BytesIO_flush_impl((bytesio *)self); } PyDoc_STRVAR(_io_BytesIO_getbuffer__doc__, @@ -94,13 +94,13 @@ static PyObject * _io_BytesIO_getbuffer_impl(bytesio *self, PyTypeObject *cls); static PyObject * -_io_BytesIO_getbuffer(bytesio *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_io_BytesIO_getbuffer(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "getbuffer() takes no arguments"); return NULL; } - return _io_BytesIO_getbuffer_impl(self, cls); + return _io_BytesIO_getbuffer_impl((bytesio *)self, cls); } PyDoc_STRVAR(_io_BytesIO_getvalue__doc__, @@ -116,9 +116,9 @@ static PyObject * _io_BytesIO_getvalue_impl(bytesio *self); static PyObject * -_io_BytesIO_getvalue(bytesio *self, PyObject *Py_UNUSED(ignored)) +_io_BytesIO_getvalue(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _io_BytesIO_getvalue_impl(self); + return _io_BytesIO_getvalue_impl((bytesio *)self); } PyDoc_STRVAR(_io_BytesIO_isatty__doc__, @@ -136,9 +136,9 @@ static PyObject * _io_BytesIO_isatty_impl(bytesio *self); static PyObject * -_io_BytesIO_isatty(bytesio *self, PyObject *Py_UNUSED(ignored)) +_io_BytesIO_isatty(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _io_BytesIO_isatty_impl(self); + return _io_BytesIO_isatty_impl((bytesio *)self); } PyDoc_STRVAR(_io_BytesIO_tell__doc__, @@ -154,9 +154,9 @@ static PyObject * _io_BytesIO_tell_impl(bytesio *self); static PyObject * -_io_BytesIO_tell(bytesio *self, PyObject *Py_UNUSED(ignored)) +_io_BytesIO_tell(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _io_BytesIO_tell_impl(self); + return _io_BytesIO_tell_impl((bytesio *)self); } PyDoc_STRVAR(_io_BytesIO_read__doc__, @@ -175,7 +175,7 @@ static PyObject * _io_BytesIO_read_impl(bytesio *self, Py_ssize_t size); static PyObject * -_io_BytesIO_read(bytesio *self, PyObject *const *args, Py_ssize_t nargs) +_io_BytesIO_read(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; Py_ssize_t size = -1; @@ -190,7 +190,7 @@ _io_BytesIO_read(bytesio *self, PyObject *const *args, Py_ssize_t nargs) goto exit; } skip_optional: - return_value = _io_BytesIO_read_impl(self, size); + return_value = _io_BytesIO_read_impl((bytesio *)self, size); exit: return return_value; @@ -212,7 +212,7 @@ static PyObject * _io_BytesIO_read1_impl(bytesio *self, Py_ssize_t size); static PyObject * -_io_BytesIO_read1(bytesio *self, PyObject *const *args, Py_ssize_t nargs) +_io_BytesIO_read1(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; Py_ssize_t size = -1; @@ -227,7 +227,7 @@ _io_BytesIO_read1(bytesio *self, PyObject *const *args, Py_ssize_t nargs) goto exit; } skip_optional: - return_value = _io_BytesIO_read1_impl(self, size); + return_value = _io_BytesIO_read1_impl((bytesio *)self, size); exit: return return_value; @@ -250,7 +250,7 @@ static PyObject * _io_BytesIO_readline_impl(bytesio *self, Py_ssize_t size); static PyObject * 
-_io_BytesIO_readline(bytesio *self, PyObject *const *args, Py_ssize_t nargs) +_io_BytesIO_readline(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; Py_ssize_t size = -1; @@ -265,7 +265,7 @@ _io_BytesIO_readline(bytesio *self, PyObject *const *args, Py_ssize_t nargs) goto exit; } skip_optional: - return_value = _io_BytesIO_readline_impl(self, size); + return_value = _io_BytesIO_readline_impl((bytesio *)self, size); exit: return return_value; @@ -288,7 +288,7 @@ static PyObject * _io_BytesIO_readlines_impl(bytesio *self, PyObject *arg); static PyObject * -_io_BytesIO_readlines(bytesio *self, PyObject *const *args, Py_ssize_t nargs) +_io_BytesIO_readlines(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *arg = Py_None; @@ -301,7 +301,7 @@ _io_BytesIO_readlines(bytesio *self, PyObject *const *args, Py_ssize_t nargs) } arg = args[0]; skip_optional: - return_value = _io_BytesIO_readlines_impl(self, arg); + return_value = _io_BytesIO_readlines_impl((bytesio *)self, arg); exit: return return_value; @@ -323,7 +323,7 @@ static PyObject * _io_BytesIO_readinto_impl(bytesio *self, Py_buffer *buffer); static PyObject * -_io_BytesIO_readinto(bytesio *self, PyObject *arg) +_io_BytesIO_readinto(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; Py_buffer buffer = {NULL, NULL}; @@ -332,7 +332,7 @@ _io_BytesIO_readinto(bytesio *self, PyObject *arg) _PyArg_BadArgument("readinto", "argument", "read-write bytes-like object", arg); goto exit; } - return_value = _io_BytesIO_readinto_impl(self, &buffer); + return_value = _io_BytesIO_readinto_impl((bytesio *)self, &buffer); exit: /* Cleanup for buffer */ @@ -359,10 +359,10 @@ static PyObject * _io_BytesIO_truncate_impl(bytesio *self, Py_ssize_t size); static PyObject * -_io_BytesIO_truncate(bytesio *self, PyObject *const *args, Py_ssize_t nargs) +_io_BytesIO_truncate(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; - Py_ssize_t size = self->pos; + Py_ssize_t size = ((bytesio *)self)->pos; if (!_PyArg_CheckPositional("truncate", nargs, 0, 1)) { goto exit; @@ -374,7 +374,7 @@ _io_BytesIO_truncate(bytesio *self, PyObject *const *args, Py_ssize_t nargs) goto exit; } skip_optional: - return_value = _io_BytesIO_truncate_impl(self, size); + return_value = _io_BytesIO_truncate_impl((bytesio *)self, size); exit: return return_value; @@ -399,7 +399,7 @@ static PyObject * _io_BytesIO_seek_impl(bytesio *self, Py_ssize_t pos, int whence); static PyObject * -_io_BytesIO_seek(bytesio *self, PyObject *const *args, Py_ssize_t nargs) +_io_BytesIO_seek(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; Py_ssize_t pos; @@ -428,7 +428,7 @@ _io_BytesIO_seek(bytesio *self, PyObject *const *args, Py_ssize_t nargs) goto exit; } skip_optional: - return_value = _io_BytesIO_seek_impl(self, pos, whence); + return_value = _io_BytesIO_seek_impl((bytesio *)self, pos, whence); exit: return return_value; @@ -471,9 +471,9 @@ static PyObject * _io_BytesIO_close_impl(bytesio *self); static PyObject * -_io_BytesIO_close(bytesio *self, PyObject *Py_UNUSED(ignored)) +_io_BytesIO_close(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _io_BytesIO_close_impl(self); + return _io_BytesIO_close_impl((bytesio *)self); } PyDoc_STRVAR(_io_BytesIO___init____doc__, @@ -535,4 +535,4 @@ _io_BytesIO___init__(PyObject *self, PyObject *args, PyObject *kwargs) exit: return return_value; } -/*[clinic end generated code: 
output=985ff54e89f6036e input=a9049054013a1b77]*/ +/*[clinic end generated code: output=8a5e153bc7584b55 input=a9049054013a1b77]*/ diff --git a/Modules/_io/clinic/fileio.c.h b/Modules/_io/clinic/fileio.c.h index 0b8b4a49ac24b6..22d27bce67799e 100644 --- a/Modules/_io/clinic/fileio.c.h +++ b/Modules/_io/clinic/fileio.c.h @@ -25,13 +25,13 @@ static PyObject * _io_FileIO_close_impl(fileio *self, PyTypeObject *cls); static PyObject * -_io_FileIO_close(fileio *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_io_FileIO_close(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "close() takes no arguments"); return NULL; } - return _io_FileIO_close_impl(self, cls); + return _io_FileIO_close_impl((fileio *)self, cls); } PyDoc_STRVAR(_io_FileIO___init____doc__, @@ -151,9 +151,9 @@ static PyObject * _io_FileIO_fileno_impl(fileio *self); static PyObject * -_io_FileIO_fileno(fileio *self, PyObject *Py_UNUSED(ignored)) +_io_FileIO_fileno(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _io_FileIO_fileno_impl(self); + return _io_FileIO_fileno_impl((fileio *)self); } PyDoc_STRVAR(_io_FileIO_readable__doc__, @@ -169,9 +169,9 @@ static PyObject * _io_FileIO_readable_impl(fileio *self); static PyObject * -_io_FileIO_readable(fileio *self, PyObject *Py_UNUSED(ignored)) +_io_FileIO_readable(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _io_FileIO_readable_impl(self); + return _io_FileIO_readable_impl((fileio *)self); } PyDoc_STRVAR(_io_FileIO_writable__doc__, @@ -187,9 +187,9 @@ static PyObject * _io_FileIO_writable_impl(fileio *self); static PyObject * -_io_FileIO_writable(fileio *self, PyObject *Py_UNUSED(ignored)) +_io_FileIO_writable(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _io_FileIO_writable_impl(self); + return _io_FileIO_writable_impl((fileio *)self); } PyDoc_STRVAR(_io_FileIO_seekable__doc__, @@ -205,9 +205,9 @@ static PyObject * _io_FileIO_seekable_impl(fileio *self); static PyObject * -_io_FileIO_seekable(fileio *self, PyObject *Py_UNUSED(ignored)) +_io_FileIO_seekable(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _io_FileIO_seekable_impl(self); + return _io_FileIO_seekable_impl((fileio *)self); } PyDoc_STRVAR(_io_FileIO_readinto__doc__, @@ -223,7 +223,7 @@ static PyObject * _io_FileIO_readinto_impl(fileio *self, PyTypeObject *cls, Py_buffer *buffer); static PyObject * -_io_FileIO_readinto(fileio *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_io_FileIO_readinto(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -251,7 +251,7 @@ _io_FileIO_readinto(fileio *self, PyTypeObject *cls, PyObject *const *args, Py_s _PyArg_BadArgument("readinto", "argument 1", "read-write bytes-like object", args[0]); goto exit; } - return_value = _io_FileIO_readinto_impl(self, cls, &buffer); + return_value = _io_FileIO_readinto_impl((fileio *)self, cls, &buffer); exit: /* Cleanup for buffer */ @@ -278,9 +278,9 @@ static PyObject * _io_FileIO_readall_impl(fileio *self); static PyObject * -_io_FileIO_readall(fileio *self, PyObject *Py_UNUSED(ignored)) +_io_FileIO_readall(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _io_FileIO_readall_impl(self); + return _io_FileIO_readall_impl((fileio *)self); } 
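/*
 * Illustrative sketch, not part of the generated patch: every regenerated
 * Argument Clinic wrapper in these headers follows the same shape.  The
 * entry point now takes PyObject *self, so it matches PyCFunction exactly
 * and needs no function-pointer cast in the method table; the downcast to
 * the concrete struct happens once, at the call into the _impl function.
 * The "mytype" names below are hypothetical.
 */
#include "Python.h"

typedef struct {
    PyObject_HEAD
    Py_ssize_t pos;
} mytype;

static PyObject *
mytype_method_impl(mytype *self)
{
    /* Typed implementation: free to touch struct fields directly. */
    return PyLong_FromSsize_t(self->pos);
}

static PyObject *
mytype_method(PyObject *self, PyObject *Py_UNUSED(ignored))
{
    /* Generic wrapper: the only place the downcast appears. */
    return mytype_method_impl((mytype *)self);
}

static PyMethodDef mytype_methods[] = {
    /* No (PyCFunction) cast needed: the wrapper already has that type. */
    {"method", mytype_method, METH_NOARGS, NULL},
    {NULL, NULL, 0, NULL},
};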
PyDoc_STRVAR(_io_FileIO_read__doc__, @@ -300,7 +300,7 @@ static PyObject * _io_FileIO_read_impl(fileio *self, PyTypeObject *cls, Py_ssize_t size); static PyObject * -_io_FileIO_read(fileio *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_io_FileIO_read(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -331,7 +331,7 @@ _io_FileIO_read(fileio *self, PyTypeObject *cls, PyObject *const *args, Py_ssize goto exit; } skip_optional_posonly: - return_value = _io_FileIO_read_impl(self, cls, size); + return_value = _io_FileIO_read_impl((fileio *)self, cls, size); exit: return return_value; @@ -354,7 +354,7 @@ static PyObject * _io_FileIO_write_impl(fileio *self, PyTypeObject *cls, Py_buffer *b); static PyObject * -_io_FileIO_write(fileio *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_io_FileIO_write(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -381,7 +381,7 @@ _io_FileIO_write(fileio *self, PyTypeObject *cls, PyObject *const *args, Py_ssiz if (PyObject_GetBuffer(args[0], &b, PyBUF_SIMPLE) != 0) { goto exit; } - return_value = _io_FileIO_write_impl(self, cls, &b); + return_value = _io_FileIO_write_impl((fileio *)self, cls, &b); exit: /* Cleanup for b */ @@ -413,7 +413,7 @@ static PyObject * _io_FileIO_seek_impl(fileio *self, PyObject *pos, int whence); static PyObject * -_io_FileIO_seek(fileio *self, PyObject *const *args, Py_ssize_t nargs) +_io_FileIO_seek(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *pos; @@ -431,7 +431,7 @@ _io_FileIO_seek(fileio *self, PyObject *const *args, Py_ssize_t nargs) goto exit; } skip_optional: - return_value = _io_FileIO_seek_impl(self, pos, whence); + return_value = _io_FileIO_seek_impl((fileio *)self, pos, whence); exit: return return_value; @@ -452,9 +452,9 @@ static PyObject * _io_FileIO_tell_impl(fileio *self); static PyObject * -_io_FileIO_tell(fileio *self, PyObject *Py_UNUSED(ignored)) +_io_FileIO_tell(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _io_FileIO_tell_impl(self); + return _io_FileIO_tell_impl((fileio *)self); } #if defined(HAVE_FTRUNCATE) @@ -475,7 +475,7 @@ static PyObject * _io_FileIO_truncate_impl(fileio *self, PyTypeObject *cls, PyObject *posobj); static PyObject * -_io_FileIO_truncate(fileio *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_io_FileIO_truncate(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -504,7 +504,7 @@ _io_FileIO_truncate(fileio *self, PyTypeObject *cls, PyObject *const *args, Py_s } posobj = args[0]; skip_optional_posonly: - return_value = _io_FileIO_truncate_impl(self, cls, posobj); + return_value = _io_FileIO_truncate_impl((fileio *)self, cls, posobj); exit: return return_value; @@ -525,12 +525,12 @@ static PyObject * _io_FileIO_isatty_impl(fileio *self); static PyObject * -_io_FileIO_isatty(fileio *self, PyObject *Py_UNUSED(ignored)) +_io_FileIO_isatty(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _io_FileIO_isatty_impl(self); + return _io_FileIO_isatty_impl((fileio *)self); } #ifndef 
_IO_FILEIO_TRUNCATE_METHODDEF #define _IO_FILEIO_TRUNCATE_METHODDEF #endif /* !defined(_IO_FILEIO_TRUNCATE_METHODDEF) */ -/*[clinic end generated code: output=1c262ae135da4dcb input=a9049054013a1b77]*/ +/*[clinic end generated code: output=dcbeb6a0b13e4b1f input=a9049054013a1b77]*/ diff --git a/Modules/_io/clinic/stringio.c.h b/Modules/_io/clinic/stringio.c.h index 6f9205af32f010..bc571698806bde 100644 --- a/Modules/_io/clinic/stringio.c.h +++ b/Modules/_io/clinic/stringio.c.h @@ -23,12 +23,12 @@ static PyObject * _io_StringIO_getvalue_impl(stringio *self); static PyObject * -_io_StringIO_getvalue(stringio *self, PyObject *Py_UNUSED(ignored)) +_io_StringIO_getvalue(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_StringIO_getvalue_impl(self); + return_value = _io_StringIO_getvalue_impl((stringio *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -47,12 +47,12 @@ static PyObject * _io_StringIO_tell_impl(stringio *self); static PyObject * -_io_StringIO_tell(stringio *self, PyObject *Py_UNUSED(ignored)) +_io_StringIO_tell(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_StringIO_tell_impl(self); + return_value = _io_StringIO_tell_impl((stringio *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -74,7 +74,7 @@ static PyObject * _io_StringIO_read_impl(stringio *self, Py_ssize_t size); static PyObject * -_io_StringIO_read(stringio *self, PyObject *const *args, Py_ssize_t nargs) +_io_StringIO_read(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; Py_ssize_t size = -1; @@ -90,7 +90,7 @@ _io_StringIO_read(stringio *self, PyObject *const *args, Py_ssize_t nargs) } skip_optional: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_StringIO_read_impl(self, size); + return_value = _io_StringIO_read_impl((stringio *)self, size); Py_END_CRITICAL_SECTION(); exit: @@ -112,7 +112,7 @@ static PyObject * _io_StringIO_readline_impl(stringio *self, Py_ssize_t size); static PyObject * -_io_StringIO_readline(stringio *self, PyObject *const *args, Py_ssize_t nargs) +_io_StringIO_readline(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; Py_ssize_t size = -1; @@ -128,7 +128,7 @@ _io_StringIO_readline(stringio *self, PyObject *const *args, Py_ssize_t nargs) } skip_optional: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_StringIO_readline_impl(self, size); + return_value = _io_StringIO_readline_impl((stringio *)self, size); Py_END_CRITICAL_SECTION(); exit: @@ -152,10 +152,10 @@ static PyObject * _io_StringIO_truncate_impl(stringio *self, Py_ssize_t size); static PyObject * -_io_StringIO_truncate(stringio *self, PyObject *const *args, Py_ssize_t nargs) +_io_StringIO_truncate(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; - Py_ssize_t size = self->pos; + Py_ssize_t size = ((stringio *)self)->pos; if (!_PyArg_CheckPositional("truncate", nargs, 0, 1)) { goto exit; @@ -168,7 +168,7 @@ _io_StringIO_truncate(stringio *self, PyObject *const *args, Py_ssize_t nargs) } skip_optional: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_StringIO_truncate_impl(self, size); + return_value = _io_StringIO_truncate_impl((stringio *)self, size); Py_END_CRITICAL_SECTION(); exit: @@ -194,7 +194,7 @@ static PyObject * _io_StringIO_seek_impl(stringio *self, Py_ssize_t pos, int whence); static PyObject * 
-_io_StringIO_seek(stringio *self, PyObject *const *args, Py_ssize_t nargs) +_io_StringIO_seek(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; Py_ssize_t pos; @@ -224,7 +224,7 @@ _io_StringIO_seek(stringio *self, PyObject *const *args, Py_ssize_t nargs) } skip_optional: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_StringIO_seek_impl(self, pos, whence); + return_value = _io_StringIO_seek_impl((stringio *)self, pos, whence); Py_END_CRITICAL_SECTION(); exit: @@ -252,7 +252,7 @@ _io_StringIO_write(stringio *self, PyObject *obj) PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_StringIO_write_impl(self, obj); + return_value = _io_StringIO_write_impl((stringio *)self, obj); Py_END_CRITICAL_SECTION(); return return_value; @@ -276,12 +276,12 @@ static PyObject * _io_StringIO_close_impl(stringio *self); static PyObject * -_io_StringIO_close(stringio *self, PyObject *Py_UNUSED(ignored)) +_io_StringIO_close(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_StringIO_close_impl(self); + return_value = _io_StringIO_close_impl((stringio *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -371,12 +371,12 @@ static PyObject * _io_StringIO_readable_impl(stringio *self); static PyObject * -_io_StringIO_readable(stringio *self, PyObject *Py_UNUSED(ignored)) +_io_StringIO_readable(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_StringIO_readable_impl(self); + return_value = _io_StringIO_readable_impl((stringio *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -395,12 +395,12 @@ static PyObject * _io_StringIO_writable_impl(stringio *self); static PyObject * -_io_StringIO_writable(stringio *self, PyObject *Py_UNUSED(ignored)) +_io_StringIO_writable(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_StringIO_writable_impl(self); + return_value = _io_StringIO_writable_impl((stringio *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -419,12 +419,12 @@ static PyObject * _io_StringIO_seekable_impl(stringio *self); static PyObject * -_io_StringIO_seekable(stringio *self, PyObject *Py_UNUSED(ignored)) +_io_StringIO_seekable(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_StringIO_seekable_impl(self); + return_value = _io_StringIO_seekable_impl((stringio *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -442,12 +442,12 @@ static PyObject * _io_StringIO___getstate___impl(stringio *self); static PyObject * -_io_StringIO___getstate__(stringio *self, PyObject *Py_UNUSED(ignored)) +_io_StringIO___getstate__(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_StringIO___getstate___impl(self); + return_value = _io_StringIO___getstate___impl((stringio *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -470,7 +470,7 @@ _io_StringIO___setstate__(stringio *self, PyObject *state) PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_StringIO___setstate___impl(self, state); + return_value = _io_StringIO___setstate___impl((stringio *)self, state); Py_END_CRITICAL_SECTION(); return return_value; @@ -490,12 +490,12 @@ static PyObject * 
_io_StringIO_closed_get_impl(stringio *self); static PyObject * -_io_StringIO_closed_get(stringio *self, void *Py_UNUSED(context)) +_io_StringIO_closed_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_StringIO_closed_get_impl(self); + return_value = _io_StringIO_closed_get_impl((stringio *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -515,12 +515,12 @@ static PyObject * _io_StringIO_line_buffering_get_impl(stringio *self); static PyObject * -_io_StringIO_line_buffering_get(stringio *self, void *Py_UNUSED(context)) +_io_StringIO_line_buffering_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_StringIO_line_buffering_get_impl(self); + return_value = _io_StringIO_line_buffering_get_impl((stringio *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -540,14 +540,14 @@ static PyObject * _io_StringIO_newlines_get_impl(stringio *self); static PyObject * -_io_StringIO_newlines_get(stringio *self, void *Py_UNUSED(context)) +_io_StringIO_newlines_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_StringIO_newlines_get_impl(self); + return_value = _io_StringIO_newlines_get_impl((stringio *)self); Py_END_CRITICAL_SECTION(); return return_value; } -/*[clinic end generated code: output=9d2b092274469d42 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=7796e223e778a214 input=a9049054013a1b77]*/ diff --git a/Modules/_io/clinic/textio.c.h b/Modules/_io/clinic/textio.c.h index 0acc1f060c811b..9ce1d70ad71052 100644 --- a/Modules/_io/clinic/textio.c.h +++ b/Modules/_io/clinic/textio.c.h @@ -379,7 +379,7 @@ _io_IncrementalNewlineDecoder_decode_impl(nldecoder_object *self, PyObject *input, int final); static PyObject * -_io_IncrementalNewlineDecoder_decode(nldecoder_object *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_io_IncrementalNewlineDecoder_decode(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -426,7 +426,7 @@ _io_IncrementalNewlineDecoder_decode(nldecoder_object *self, PyObject *const *ar goto exit; } skip_optional_pos: - return_value = _io_IncrementalNewlineDecoder_decode_impl(self, input, final); + return_value = _io_IncrementalNewlineDecoder_decode_impl((nldecoder_object *)self, input, final); exit: return return_value; @@ -444,9 +444,9 @@ static PyObject * _io_IncrementalNewlineDecoder_getstate_impl(nldecoder_object *self); static PyObject * -_io_IncrementalNewlineDecoder_getstate(nldecoder_object *self, PyObject *Py_UNUSED(ignored)) +_io_IncrementalNewlineDecoder_getstate(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _io_IncrementalNewlineDecoder_getstate_impl(self); + return _io_IncrementalNewlineDecoder_getstate_impl((nldecoder_object *)self); } PyDoc_STRVAR(_io_IncrementalNewlineDecoder_setstate__doc__, @@ -469,9 +469,9 @@ static PyObject * _io_IncrementalNewlineDecoder_reset_impl(nldecoder_object *self); static PyObject * -_io_IncrementalNewlineDecoder_reset(nldecoder_object *self, PyObject *Py_UNUSED(ignored)) +_io_IncrementalNewlineDecoder_reset(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _io_IncrementalNewlineDecoder_reset_impl(self); + return _io_IncrementalNewlineDecoder_reset_impl((nldecoder_object *)self); } 
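/*
 * Illustrative sketch, not part of the generated patch: the StringIO and
 * TextIOWrapper wrappers add one more step -- the _impl call is guarded by
 * a critical section keyed on the object (a no-op on GIL builds, a
 * per-object lock on free-threaded builds).  The lock is taken on the
 * PyObject * and the downcast is applied only for the typed _impl call,
 * mirroring the generated code above.  The "mytype" names are hypothetical,
 * reusing the struct from the earlier sketch.
 */
static PyObject *
mytype_locked_method_impl(mytype *self)
{
    return PyLong_FromSsize_t(self->pos);
}

static PyObject *
mytype_locked_method(PyObject *self, PyObject *Py_UNUSED(ignored))
{
    PyObject *return_value = NULL;

    Py_BEGIN_CRITICAL_SECTION(self);
    return_value = mytype_locked_method_impl((mytype *)self);
    Py_END_CRITICAL_SECTION();

    return return_value;
}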
PyDoc_STRVAR(_io_TextIOWrapper___init____doc__, @@ -654,7 +654,7 @@ _io_TextIOWrapper_reconfigure_impl(textio *self, PyObject *encoding, PyObject *write_through_obj); static PyObject * -_io_TextIOWrapper_reconfigure(textio *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_io_TextIOWrapper_reconfigure(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -725,7 +725,7 @@ _io_TextIOWrapper_reconfigure(textio *self, PyObject *const *args, Py_ssize_t na write_through_obj = args[4]; skip_optional_kwonly: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_TextIOWrapper_reconfigure_impl(self, encoding, errors, newline_obj, line_buffering_obj, write_through_obj); + return_value = _io_TextIOWrapper_reconfigure_impl((textio *)self, encoding, errors, newline_obj, line_buffering_obj, write_through_obj); Py_END_CRITICAL_SECTION(); exit: @@ -744,12 +744,12 @@ static PyObject * _io_TextIOWrapper_detach_impl(textio *self); static PyObject * -_io_TextIOWrapper_detach(textio *self, PyObject *Py_UNUSED(ignored)) +_io_TextIOWrapper_detach(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_TextIOWrapper_detach_impl(self); + return_value = _io_TextIOWrapper_detach_impl((textio *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -767,7 +767,7 @@ static PyObject * _io_TextIOWrapper_write_impl(textio *self, PyObject *text); static PyObject * -_io_TextIOWrapper_write(textio *self, PyObject *arg) +_io_TextIOWrapper_write(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; PyObject *text; @@ -778,7 +778,7 @@ _io_TextIOWrapper_write(textio *self, PyObject *arg) } text = arg; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_TextIOWrapper_write_impl(self, text); + return_value = _io_TextIOWrapper_write_impl((textio *)self, text); Py_END_CRITICAL_SECTION(); exit: @@ -797,7 +797,7 @@ static PyObject * _io_TextIOWrapper_read_impl(textio *self, Py_ssize_t n); static PyObject * -_io_TextIOWrapper_read(textio *self, PyObject *const *args, Py_ssize_t nargs) +_io_TextIOWrapper_read(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; Py_ssize_t n = -1; @@ -813,7 +813,7 @@ _io_TextIOWrapper_read(textio *self, PyObject *const *args, Py_ssize_t nargs) } skip_optional: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_TextIOWrapper_read_impl(self, n); + return_value = _io_TextIOWrapper_read_impl((textio *)self, n); Py_END_CRITICAL_SECTION(); exit: @@ -832,7 +832,7 @@ static PyObject * _io_TextIOWrapper_readline_impl(textio *self, Py_ssize_t size); static PyObject * -_io_TextIOWrapper_readline(textio *self, PyObject *const *args, Py_ssize_t nargs) +_io_TextIOWrapper_readline(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; Py_ssize_t size = -1; @@ -857,7 +857,7 @@ _io_TextIOWrapper_readline(textio *self, PyObject *const *args, Py_ssize_t nargs } skip_optional: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_TextIOWrapper_readline_impl(self, size); + return_value = _io_TextIOWrapper_readline_impl((textio *)self, size); Py_END_CRITICAL_SECTION(); exit: @@ -894,7 +894,7 @@ static PyObject * _io_TextIOWrapper_seek_impl(textio *self, PyObject *cookieObj, int whence); static PyObject * -_io_TextIOWrapper_seek(textio *self, PyObject *const *args, Py_ssize_t nargs) +_io_TextIOWrapper_seek(PyObject *self, 
PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *cookieObj; @@ -913,7 +913,7 @@ _io_TextIOWrapper_seek(textio *self, PyObject *const *args, Py_ssize_t nargs) } skip_optional: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_TextIOWrapper_seek_impl(self, cookieObj, whence); + return_value = _io_TextIOWrapper_seek_impl((textio *)self, cookieObj, whence); Py_END_CRITICAL_SECTION(); exit: @@ -936,12 +936,12 @@ static PyObject * _io_TextIOWrapper_tell_impl(textio *self); static PyObject * -_io_TextIOWrapper_tell(textio *self, PyObject *Py_UNUSED(ignored)) +_io_TextIOWrapper_tell(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_TextIOWrapper_tell_impl(self); + return_value = _io_TextIOWrapper_tell_impl((textio *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -959,7 +959,7 @@ static PyObject * _io_TextIOWrapper_truncate_impl(textio *self, PyObject *pos); static PyObject * -_io_TextIOWrapper_truncate(textio *self, PyObject *const *args, Py_ssize_t nargs) +_io_TextIOWrapper_truncate(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *pos = Py_None; @@ -973,7 +973,7 @@ _io_TextIOWrapper_truncate(textio *self, PyObject *const *args, Py_ssize_t nargs pos = args[0]; skip_optional: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_TextIOWrapper_truncate_impl(self, pos); + return_value = _io_TextIOWrapper_truncate_impl((textio *)self, pos); Py_END_CRITICAL_SECTION(); exit: @@ -992,12 +992,12 @@ static PyObject * _io_TextIOWrapper_fileno_impl(textio *self); static PyObject * -_io_TextIOWrapper_fileno(textio *self, PyObject *Py_UNUSED(ignored)) +_io_TextIOWrapper_fileno(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_TextIOWrapper_fileno_impl(self); + return_value = _io_TextIOWrapper_fileno_impl((textio *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1015,12 +1015,12 @@ static PyObject * _io_TextIOWrapper_seekable_impl(textio *self); static PyObject * -_io_TextIOWrapper_seekable(textio *self, PyObject *Py_UNUSED(ignored)) +_io_TextIOWrapper_seekable(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_TextIOWrapper_seekable_impl(self); + return_value = _io_TextIOWrapper_seekable_impl((textio *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1038,12 +1038,12 @@ static PyObject * _io_TextIOWrapper_readable_impl(textio *self); static PyObject * -_io_TextIOWrapper_readable(textio *self, PyObject *Py_UNUSED(ignored)) +_io_TextIOWrapper_readable(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_TextIOWrapper_readable_impl(self); + return_value = _io_TextIOWrapper_readable_impl((textio *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1061,12 +1061,12 @@ static PyObject * _io_TextIOWrapper_writable_impl(textio *self); static PyObject * -_io_TextIOWrapper_writable(textio *self, PyObject *Py_UNUSED(ignored)) +_io_TextIOWrapper_writable(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_TextIOWrapper_writable_impl(self); + return_value = _io_TextIOWrapper_writable_impl((textio *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1084,12 +1084,12 @@ static PyObject * 
_io_TextIOWrapper_isatty_impl(textio *self); static PyObject * -_io_TextIOWrapper_isatty(textio *self, PyObject *Py_UNUSED(ignored)) +_io_TextIOWrapper_isatty(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_TextIOWrapper_isatty_impl(self); + return_value = _io_TextIOWrapper_isatty_impl((textio *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1107,12 +1107,12 @@ static PyObject * _io_TextIOWrapper_flush_impl(textio *self); static PyObject * -_io_TextIOWrapper_flush(textio *self, PyObject *Py_UNUSED(ignored)) +_io_TextIOWrapper_flush(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_TextIOWrapper_flush_impl(self); + return_value = _io_TextIOWrapper_flush_impl((textio *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1130,12 +1130,12 @@ static PyObject * _io_TextIOWrapper_close_impl(textio *self); static PyObject * -_io_TextIOWrapper_close(textio *self, PyObject *Py_UNUSED(ignored)) +_io_TextIOWrapper_close(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_TextIOWrapper_close_impl(self); + return_value = _io_TextIOWrapper_close_impl((textio *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1155,12 +1155,12 @@ static PyObject * _io_TextIOWrapper_name_get_impl(textio *self); static PyObject * -_io_TextIOWrapper_name_get(textio *self, void *Py_UNUSED(context)) +_io_TextIOWrapper_name_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_TextIOWrapper_name_get_impl(self); + return_value = _io_TextIOWrapper_name_get_impl((textio *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1180,12 +1180,12 @@ static PyObject * _io_TextIOWrapper_closed_get_impl(textio *self); static PyObject * -_io_TextIOWrapper_closed_get(textio *self, void *Py_UNUSED(context)) +_io_TextIOWrapper_closed_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_TextIOWrapper_closed_get_impl(self); + return_value = _io_TextIOWrapper_closed_get_impl((textio *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1205,12 +1205,12 @@ static PyObject * _io_TextIOWrapper_newlines_get_impl(textio *self); static PyObject * -_io_TextIOWrapper_newlines_get(textio *self, void *Py_UNUSED(context)) +_io_TextIOWrapper_newlines_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_TextIOWrapper_newlines_get_impl(self); + return_value = _io_TextIOWrapper_newlines_get_impl((textio *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1230,12 +1230,12 @@ static PyObject * _io_TextIOWrapper_errors_get_impl(textio *self); static PyObject * -_io_TextIOWrapper_errors_get(textio *self, void *Py_UNUSED(context)) +_io_TextIOWrapper_errors_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_TextIOWrapper_errors_get_impl(self); + return_value = _io_TextIOWrapper_errors_get_impl((textio *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1255,12 +1255,12 @@ static PyObject * _io_TextIOWrapper__CHUNK_SIZE_get_impl(textio *self); static PyObject * -_io_TextIOWrapper__CHUNK_SIZE_get(textio *self, void *Py_UNUSED(context)) 
+_io_TextIOWrapper__CHUNK_SIZE_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_TextIOWrapper__CHUNK_SIZE_get_impl(self); + return_value = _io_TextIOWrapper__CHUNK_SIZE_get_impl((textio *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1280,14 +1280,14 @@ static int _io_TextIOWrapper__CHUNK_SIZE_set_impl(textio *self, PyObject *value); static int -_io_TextIOWrapper__CHUNK_SIZE_set(textio *self, PyObject *value, void *Py_UNUSED(context)) +_io_TextIOWrapper__CHUNK_SIZE_set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) { int return_value; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_TextIOWrapper__CHUNK_SIZE_set_impl(self, value); + return_value = _io_TextIOWrapper__CHUNK_SIZE_set_impl((textio *)self, value); Py_END_CRITICAL_SECTION(); return return_value; } -/*[clinic end generated code: output=423a320f087792b9 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=6e64e43113a97340 input=a9049054013a1b77]*/ diff --git a/Modules/_io/clinic/winconsoleio.c.h b/Modules/_io/clinic/winconsoleio.c.h index df281dfeb13fef..ba6dcde6e01064 100644 --- a/Modules/_io/clinic/winconsoleio.c.h +++ b/Modules/_io/clinic/winconsoleio.c.h @@ -27,13 +27,13 @@ static PyObject * _io__WindowsConsoleIO_close_impl(winconsoleio *self, PyTypeObject *cls); static PyObject * -_io__WindowsConsoleIO_close(winconsoleio *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_io__WindowsConsoleIO_close(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "close() takes no arguments"); return NULL; } - return _io__WindowsConsoleIO_close_impl(self, cls); + return _io__WindowsConsoleIO_close_impl((winconsoleio *)self, cls); } #endif /* defined(HAVE_WINDOWS_CONSOLE_IO) */ @@ -154,9 +154,9 @@ static PyObject * _io__WindowsConsoleIO_fileno_impl(winconsoleio *self); static PyObject * -_io__WindowsConsoleIO_fileno(winconsoleio *self, PyObject *Py_UNUSED(ignored)) +_io__WindowsConsoleIO_fileno(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _io__WindowsConsoleIO_fileno_impl(self); + return _io__WindowsConsoleIO_fileno_impl((winconsoleio *)self); } #endif /* defined(HAVE_WINDOWS_CONSOLE_IO) */ @@ -176,9 +176,9 @@ static PyObject * _io__WindowsConsoleIO_readable_impl(winconsoleio *self); static PyObject * -_io__WindowsConsoleIO_readable(winconsoleio *self, PyObject *Py_UNUSED(ignored)) +_io__WindowsConsoleIO_readable(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _io__WindowsConsoleIO_readable_impl(self); + return _io__WindowsConsoleIO_readable_impl((winconsoleio *)self); } #endif /* defined(HAVE_WINDOWS_CONSOLE_IO) */ @@ -198,9 +198,9 @@ static PyObject * _io__WindowsConsoleIO_writable_impl(winconsoleio *self); static PyObject * -_io__WindowsConsoleIO_writable(winconsoleio *self, PyObject *Py_UNUSED(ignored)) +_io__WindowsConsoleIO_writable(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _io__WindowsConsoleIO_writable_impl(self); + return _io__WindowsConsoleIO_writable_impl((winconsoleio *)self); } #endif /* defined(HAVE_WINDOWS_CONSOLE_IO) */ @@ -221,7 +221,7 @@ _io__WindowsConsoleIO_readinto_impl(winconsoleio *self, PyTypeObject *cls, Py_buffer *buffer); static PyObject * -_io__WindowsConsoleIO_readinto(winconsoleio *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) 
+_io__WindowsConsoleIO_readinto(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -249,7 +249,7 @@ _io__WindowsConsoleIO_readinto(winconsoleio *self, PyTypeObject *cls, PyObject * _PyArg_BadArgument("readinto", "argument 1", "read-write bytes-like object", args[0]); goto exit; } - return_value = _io__WindowsConsoleIO_readinto_impl(self, cls, &buffer); + return_value = _io__WindowsConsoleIO_readinto_impl((winconsoleio *)self, cls, &buffer); exit: /* Cleanup for buffer */ @@ -279,9 +279,9 @@ static PyObject * _io__WindowsConsoleIO_readall_impl(winconsoleio *self); static PyObject * -_io__WindowsConsoleIO_readall(winconsoleio *self, PyObject *Py_UNUSED(ignored)) +_io__WindowsConsoleIO_readall(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _io__WindowsConsoleIO_readall_impl(self); + return _io__WindowsConsoleIO_readall_impl((winconsoleio *)self); } #endif /* defined(HAVE_WINDOWS_CONSOLE_IO) */ @@ -306,7 +306,7 @@ _io__WindowsConsoleIO_read_impl(winconsoleio *self, PyTypeObject *cls, Py_ssize_t size); static PyObject * -_io__WindowsConsoleIO_read(winconsoleio *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_io__WindowsConsoleIO_read(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -337,7 +337,7 @@ _io__WindowsConsoleIO_read(winconsoleio *self, PyTypeObject *cls, PyObject *cons goto exit; } skip_optional_posonly: - return_value = _io__WindowsConsoleIO_read_impl(self, cls, size); + return_value = _io__WindowsConsoleIO_read_impl((winconsoleio *)self, cls, size); exit: return return_value; @@ -364,7 +364,7 @@ _io__WindowsConsoleIO_write_impl(winconsoleio *self, PyTypeObject *cls, Py_buffer *b); static PyObject * -_io__WindowsConsoleIO_write(winconsoleio *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_io__WindowsConsoleIO_write(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -391,7 +391,7 @@ _io__WindowsConsoleIO_write(winconsoleio *self, PyTypeObject *cls, PyObject *con if (PyObject_GetBuffer(args[0], &b, PyBUF_SIMPLE) != 0) { goto exit; } - return_value = _io__WindowsConsoleIO_write_impl(self, cls, &b); + return_value = _io__WindowsConsoleIO_write_impl((winconsoleio *)self, cls, &b); exit: /* Cleanup for b */ @@ -419,9 +419,9 @@ static PyObject * _io__WindowsConsoleIO_isatty_impl(winconsoleio *self); static PyObject * -_io__WindowsConsoleIO_isatty(winconsoleio *self, PyObject *Py_UNUSED(ignored)) +_io__WindowsConsoleIO_isatty(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _io__WindowsConsoleIO_isatty_impl(self); + return _io__WindowsConsoleIO_isatty_impl((winconsoleio *)self); } #endif /* defined(HAVE_WINDOWS_CONSOLE_IO) */ @@ -461,4 +461,4 @@ _io__WindowsConsoleIO_isatty(winconsoleio *self, PyObject *Py_UNUSED(ignored)) #ifndef _IO__WINDOWSCONSOLEIO_ISATTY_METHODDEF #define _IO__WINDOWSCONSOLEIO_ISATTY_METHODDEF #endif /* !defined(_IO__WINDOWSCONSOLEIO_ISATTY_METHODDEF) */ -/*[clinic end generated code: output=78e0f6abf4de2d6d input=a9049054013a1b77]*/ +/*[clinic end generated code: output=edc47f5c49589045 input=a9049054013a1b77]*/ diff --git a/Modules/_io/stringio.c 
b/Modules/_io/stringio.c index 65e8d97aa8ac19..d1a71298a9087f 100644 --- a/Modules/_io/stringio.c +++ b/Modules/_io/stringio.c @@ -30,7 +30,7 @@ typedef struct { _PyUnicodeWriter is destroyed. */ int state; - _PyUnicodeWriter writer; + PyUnicodeWriter *writer; char ok; /* initialized? */ char closed; @@ -129,14 +129,18 @@ resize_buffer(stringio *self, size_t size) static PyObject * make_intermediate(stringio *self) { - PyObject *intermediate = _PyUnicodeWriter_Finish(&self->writer); + PyObject *intermediate = PyUnicodeWriter_Finish(self->writer); + self->writer = NULL; self->state = STATE_REALIZED; if (intermediate == NULL) return NULL; - _PyUnicodeWriter_Init(&self->writer); - self->writer.overallocate = 1; - if (_PyUnicodeWriter_WriteStr(&self->writer, intermediate)) { + self->writer = PyUnicodeWriter_Create(0); + if (self->writer == NULL) { + Py_DECREF(intermediate); + return NULL; + } + if (PyUnicodeWriter_WriteStr(self->writer, intermediate)) { Py_DECREF(intermediate); return NULL; } @@ -155,7 +159,8 @@ realize(stringio *self) assert(self->state == STATE_ACCUMULATING); self->state = STATE_REALIZED; - intermediate = _PyUnicodeWriter_Finish(&self->writer); + intermediate = PyUnicodeWriter_Finish(self->writer); + self->writer = NULL; if (intermediate == NULL) return -1; @@ -217,7 +222,7 @@ write_str(stringio *self, PyObject *obj) if (self->state == STATE_ACCUMULATING) { if (self->string_size == self->pos) { - if (_PyUnicodeWriter_WriteStr(&self->writer, decoded)) + if (PyUnicodeWriter_WriteStr(self->writer, decoded)) goto fail; goto success; } @@ -437,7 +442,7 @@ stringio_iternext(stringio *self) /*[clinic input] @critical_section _io.StringIO.truncate - pos as size: Py_ssize_t(accept={int, NoneType}, c_default="self->pos") = None + pos as size: Py_ssize_t(accept={int, NoneType}, c_default="((stringio *)self)->pos") = None / Truncate size to pos. @@ -449,7 +454,7 @@ Returns the new absolute position. 
static PyObject * _io_StringIO_truncate_impl(stringio *self, Py_ssize_t size) -/*[clinic end generated code: output=eb3aef8e06701365 input=461b872dce238452]*/ +/*[clinic end generated code: output=eb3aef8e06701365 input=fa8a6c98bb2ba780]*/ { CHECK_INITIALIZED(self); CHECK_CLOSED(self); @@ -577,7 +582,8 @@ _io_StringIO_close_impl(stringio *self) /* Free up some memory */ if (resize_buffer(self, 0) < 0) return NULL; - _PyUnicodeWriter_Dealloc(&self->writer); + PyUnicodeWriter_Discard(self->writer); + self->writer = NULL; Py_CLEAR(self->readnl); Py_CLEAR(self->writenl); Py_CLEAR(self->decoder); @@ -615,7 +621,7 @@ stringio_dealloc(stringio *self) PyMem_Free(self->buf); self->buf = NULL; } - _PyUnicodeWriter_Dealloc(&self->writer); + PyUnicodeWriter_Discard(self->writer); (void)stringio_clear(self); if (self->weakreflist != NULL) { PyObject_ClearWeakRefs((PyObject *) self); @@ -699,7 +705,8 @@ _io_StringIO___init___impl(stringio *self, PyObject *value, self->ok = 0; - _PyUnicodeWriter_Dealloc(&self->writer); + PyUnicodeWriter_Discard(self->writer); + self->writer = NULL; Py_CLEAR(self->readnl); Py_CLEAR(self->writenl); Py_CLEAR(self->decoder); @@ -754,8 +761,10 @@ _io_StringIO___init___impl(stringio *self, PyObject *value, /* Empty stringio object, we can start by accumulating */ if (resize_buffer(self, 0) < 0) return -1; - _PyUnicodeWriter_Init(&self->writer); - self->writer.overallocate = 1; + self->writer = PyUnicodeWriter_Create(0); + if (self->writer == NULL) { + return -1; + } self->state = STATE_ACCUMULATING; } self->pos = 0; diff --git a/Modules/_json.c b/Modules/_json.c index a99abbe72bf7a0..5532e252819bbd 100644 --- a/Modules/_json.c +++ b/Modules/_json.c @@ -302,7 +302,7 @@ raise_errmsg(const char *msg, PyObject *s, Py_ssize_t end) /* Use JSONDecodeError exception to raise a nice looking ValueError subclass */ _Py_DECLARE_STR(json_decoder, "json.decoder"); PyObject *JSONDecodeError = - _PyImport_GetModuleAttr(&_Py_STR(json_decoder), &_Py_ID(JSONDecodeError)); + PyImport_ImportModuleAttr(&_Py_STR(json_decoder), &_Py_ID(JSONDecodeError)); if (JSONDecodeError == NULL) { return; } @@ -353,6 +353,13 @@ _build_rval_index_tuple(PyObject *rval, Py_ssize_t idx) { return tpl; } +static inline int +_PyUnicodeWriter_IsEmpty(PyUnicodeWriter *writer_pub) +{ + _PyUnicodeWriter *writer = (_PyUnicodeWriter*)writer_pub; + return (writer->pos == 0); +} + static PyObject * scanstring_unicode(PyObject *pystr, Py_ssize_t end, int strict, Py_ssize_t *next_end_ptr) { @@ -371,9 +378,10 @@ scanstring_unicode(PyObject *pystr, Py_ssize_t end, int strict, Py_ssize_t *next const void *buf; int kind; - _PyUnicodeWriter writer; - _PyUnicodeWriter_Init(&writer); - writer.overallocate = 1; + PyUnicodeWriter *writer = PyUnicodeWriter_Create(0); + if (writer == NULL) { + goto bail; + } len = PyUnicode_GET_LENGTH(pystr); buf = PyUnicode_DATA(pystr); @@ -404,11 +412,12 @@ scanstring_unicode(PyObject *pystr, Py_ssize_t end, int strict, Py_ssize_t *next if (c == '"') { // Fast path for simple case. 
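/*
 * Illustrative note, not part of the patch: the public PyUnicodeWriter is
 * opaque, so there is no writer.buffer field to test here; that is what the
 * small _PyUnicodeWriter_IsEmpty() helper added above is for (it peeks at
 * the legacy struct's pos field).  And because PyUnicodeWriter_Create()
 * returns a heap-allocated writer, the fast path must release it with
 * PyUnicodeWriter_Discard() before returning the plain substring, where the
 * old stack-based writer could simply be abandoned while still empty.
 */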
- if (writer.buffer == NULL) { + if (_PyUnicodeWriter_IsEmpty(writer)) { PyObject *ret = PyUnicode_Substring(pystr, end, next); if (ret == NULL) { goto bail; } + PyUnicodeWriter_Discard(writer); *next_end_ptr = next + 1;; return ret; } @@ -420,7 +429,7 @@ scanstring_unicode(PyObject *pystr, Py_ssize_t end, int strict, Py_ssize_t *next /* Pick up this chunk if it's not zero length */ if (next != end) { - if (_PyUnicodeWriter_WriteSubstring(&writer, pystr, end, next) < 0) { + if (PyUnicodeWriter_WriteSubstring(writer, pystr, end, next) < 0) { goto bail; } } @@ -511,18 +520,18 @@ scanstring_unicode(PyObject *pystr, Py_ssize_t end, int strict, Py_ssize_t *next end -= 6; } } - if (_PyUnicodeWriter_WriteChar(&writer, c) < 0) { + if (PyUnicodeWriter_WriteChar(writer, c) < 0) { goto bail; } } - rval = _PyUnicodeWriter_Finish(&writer); + rval = PyUnicodeWriter_Finish(writer); *next_end_ptr = end; return rval; bail: *next_end_ptr = -1; - _PyUnicodeWriter_Dealloc(&writer); + PyUnicodeWriter_Discard(writer); return NULL; } diff --git a/Modules/_lsprof.c b/Modules/_lsprof.c index 51ad9fc7da8492..eab26b39be14ef 100644 --- a/Modules/_lsprof.c +++ b/Modules/_lsprof.c @@ -97,7 +97,8 @@ static PyTime_t CallExternalTimer(ProfilerObject *pObj) pObj->flags &= ~POF_EXT_TIMER; if (o == NULL) { - PyErr_WriteUnraisable(pObj->externalTimer); + PyErr_FormatUnraisable("Exception ignored while calling " + "_lsprof timer %R", pObj->externalTimer); return 0; } @@ -116,7 +117,8 @@ static PyTime_t CallExternalTimer(ProfilerObject *pObj) } Py_DECREF(o); if (err < 0) { - PyErr_WriteUnraisable(pObj->externalTimer); + PyErr_FormatUnraisable("Exception ignored while calling " + "_lsprof timer %R", pObj->externalTimer); return 0; } return result; @@ -775,7 +777,7 @@ _lsprof_Profiler_enable_impl(ProfilerObject *self, int subcalls, return NULL; } - PyObject* monitoring = _PyImport_GetModuleAttrString("sys", "monitoring"); + PyObject* monitoring = PyImport_ImportModuleAttrString("sys", "monitoring"); if (!monitoring) { return NULL; } @@ -857,7 +859,7 @@ _lsprof_Profiler_disable_impl(ProfilerObject *self) } if (self->flags & POF_ENABLED) { PyObject* result = NULL; - PyObject* monitoring = _PyImport_GetModuleAttrString("sys", "monitoring"); + PyObject* monitoring = PyImport_ImportModuleAttrString("sys", "monitoring"); if (!monitoring) { return NULL; @@ -933,7 +935,8 @@ profiler_dealloc(ProfilerObject *op) if (op->flags & POF_ENABLED) { PyThreadState *tstate = _PyThreadState_GET(); if (_PyEval_SetProfile(tstate, NULL, NULL) < 0) { - PyErr_FormatUnraisable("Exception ignored when destroying _lsprof profiler"); + PyErr_FormatUnraisable("Exception ignored while " + "destroying _lsprof profiler"); } } @@ -973,7 +976,7 @@ profiler_init_impl(ProfilerObject *self, PyObject *timer, double timeunit, Py_XSETREF(self->externalTimer, Py_XNewRef(timer)); self->tool_id = PY_MONITORING_PROFILER_ID; - PyObject* monitoring = _PyImport_GetModuleAttrString("sys", "monitoring"); + PyObject* monitoring = PyImport_ImportModuleAttrString("sys", "monitoring"); if (!monitoring) { return -1; } diff --git a/Modules/_multiprocessing/clinic/semaphore.c.h b/Modules/_multiprocessing/clinic/semaphore.c.h index 2702c3369c76ed..e789137ec1e013 100644 --- a/Modules/_multiprocessing/clinic/semaphore.c.h +++ b/Modules/_multiprocessing/clinic/semaphore.c.h @@ -25,7 +25,7 @@ _multiprocessing_SemLock_acquire_impl(SemLockObject *self, int blocking, PyObject *timeout_obj); static PyObject * -_multiprocessing_SemLock_acquire(SemLockObject *self, PyObject *const *args, 
Py_ssize_t nargs, PyObject *kwnames) +_multiprocessing_SemLock_acquire(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -78,7 +78,7 @@ _multiprocessing_SemLock_acquire(SemLockObject *self, PyObject *const *args, Py_ timeout_obj = args[1]; skip_optional_pos: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _multiprocessing_SemLock_acquire_impl(self, blocking, timeout_obj); + return_value = _multiprocessing_SemLock_acquire_impl((SemLockObject *)self, blocking, timeout_obj); Py_END_CRITICAL_SECTION(); exit: @@ -102,12 +102,12 @@ static PyObject * _multiprocessing_SemLock_release_impl(SemLockObject *self); static PyObject * -_multiprocessing_SemLock_release(SemLockObject *self, PyObject *Py_UNUSED(ignored)) +_multiprocessing_SemLock_release(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _multiprocessing_SemLock_release_impl(self); + return_value = _multiprocessing_SemLock_release_impl((SemLockObject *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -131,7 +131,7 @@ _multiprocessing_SemLock_acquire_impl(SemLockObject *self, int blocking, PyObject *timeout_obj); static PyObject * -_multiprocessing_SemLock_acquire(SemLockObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_multiprocessing_SemLock_acquire(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -184,7 +184,7 @@ _multiprocessing_SemLock_acquire(SemLockObject *self, PyObject *const *args, Py_ timeout_obj = args[1]; skip_optional_pos: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _multiprocessing_SemLock_acquire_impl(self, blocking, timeout_obj); + return_value = _multiprocessing_SemLock_acquire_impl((SemLockObject *)self, blocking, timeout_obj); Py_END_CRITICAL_SECTION(); exit: @@ -208,12 +208,12 @@ static PyObject * _multiprocessing_SemLock_release_impl(SemLockObject *self); static PyObject * -_multiprocessing_SemLock_release(SemLockObject *self, PyObject *Py_UNUSED(ignored)) +_multiprocessing_SemLock_release(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _multiprocessing_SemLock_release_impl(self); + return_value = _multiprocessing_SemLock_release_impl((SemLockObject *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -358,12 +358,12 @@ static PyObject * _multiprocessing_SemLock__count_impl(SemLockObject *self); static PyObject * -_multiprocessing_SemLock__count(SemLockObject *self, PyObject *Py_UNUSED(ignored)) +_multiprocessing_SemLock__count(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _multiprocessing_SemLock__count_impl(self); + return_value = _multiprocessing_SemLock__count_impl((SemLockObject *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -386,9 +386,9 @@ static PyObject * _multiprocessing_SemLock__is_mine_impl(SemLockObject *self); static PyObject * -_multiprocessing_SemLock__is_mine(SemLockObject *self, PyObject *Py_UNUSED(ignored)) +_multiprocessing_SemLock__is_mine(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _multiprocessing_SemLock__is_mine_impl(self); + return _multiprocessing_SemLock__is_mine_impl((SemLockObject *)self); } #endif /* defined(HAVE_MP_SEMAPHORE) */ @@ 
-408,9 +408,9 @@ static PyObject * _multiprocessing_SemLock__get_value_impl(SemLockObject *self); static PyObject * -_multiprocessing_SemLock__get_value(SemLockObject *self, PyObject *Py_UNUSED(ignored)) +_multiprocessing_SemLock__get_value(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _multiprocessing_SemLock__get_value_impl(self); + return _multiprocessing_SemLock__get_value_impl((SemLockObject *)self); } #endif /* defined(HAVE_MP_SEMAPHORE) */ @@ -430,9 +430,9 @@ static PyObject * _multiprocessing_SemLock__is_zero_impl(SemLockObject *self); static PyObject * -_multiprocessing_SemLock__is_zero(SemLockObject *self, PyObject *Py_UNUSED(ignored)) +_multiprocessing_SemLock__is_zero(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _multiprocessing_SemLock__is_zero_impl(self); + return _multiprocessing_SemLock__is_zero_impl((SemLockObject *)self); } #endif /* defined(HAVE_MP_SEMAPHORE) */ @@ -452,9 +452,9 @@ static PyObject * _multiprocessing_SemLock__after_fork_impl(SemLockObject *self); static PyObject * -_multiprocessing_SemLock__after_fork(SemLockObject *self, PyObject *Py_UNUSED(ignored)) +_multiprocessing_SemLock__after_fork(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _multiprocessing_SemLock__after_fork_impl(self); + return _multiprocessing_SemLock__after_fork_impl((SemLockObject *)self); } #endif /* defined(HAVE_MP_SEMAPHORE) */ @@ -474,12 +474,12 @@ static PyObject * _multiprocessing_SemLock___enter___impl(SemLockObject *self); static PyObject * -_multiprocessing_SemLock___enter__(SemLockObject *self, PyObject *Py_UNUSED(ignored)) +_multiprocessing_SemLock___enter__(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _multiprocessing_SemLock___enter___impl(self); + return_value = _multiprocessing_SemLock___enter___impl((SemLockObject *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -504,7 +504,7 @@ _multiprocessing_SemLock___exit___impl(SemLockObject *self, PyObject *exc_value, PyObject *exc_tb); static PyObject * -_multiprocessing_SemLock___exit__(SemLockObject *self, PyObject *const *args, Py_ssize_t nargs) +_multiprocessing_SemLock___exit__(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *exc_type = Py_None; @@ -528,7 +528,7 @@ _multiprocessing_SemLock___exit__(SemLockObject *self, PyObject *const *args, Py exc_tb = args[2]; skip_optional: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _multiprocessing_SemLock___exit___impl(self, exc_type, exc_value, exc_tb); + return_value = _multiprocessing_SemLock___exit___impl((SemLockObject *)self, exc_type, exc_value, exc_tb); Py_END_CRITICAL_SECTION(); exit: @@ -576,4 +576,4 @@ _multiprocessing_SemLock___exit__(SemLockObject *self, PyObject *const *args, Py #ifndef _MULTIPROCESSING_SEMLOCK___EXIT___METHODDEF #define _MULTIPROCESSING_SEMLOCK___EXIT___METHODDEF #endif /* !defined(_MULTIPROCESSING_SEMLOCK___EXIT___METHODDEF) */ -/*[clinic end generated code: output=9023d3e48a24afd2 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=e28d0fdbfefd1235 input=a9049054013a1b77]*/ diff --git a/Modules/_multiprocessing/semaphore.c b/Modules/_multiprocessing/semaphore.c index 9eef7c25636899..036db2cd4c6c85 100644 --- a/Modules/_multiprocessing/semaphore.c +++ b/Modules/_multiprocessing/semaphore.c @@ -28,6 +28,8 @@ typedef struct { char *name; } SemLockObject; +#define _SemLockObject_CAST(op) ((SemLockObject *)(op)) + /*[python input] class 
SEM_HANDLE_converter(CConverter): type = "SEM_HANDLE" @@ -576,8 +578,9 @@ _multiprocessing_SemLock__rebuild_impl(PyTypeObject *type, SEM_HANDLE handle, } static void -semlock_dealloc(SemLockObject* self) +semlock_dealloc(PyObject *op) { + SemLockObject *self = _SemLockObject_CAST(op); PyTypeObject *tp = Py_TYPE(self); PyObject_GC_UnTrack(self); if (self->handle != SEM_FAILED) @@ -718,7 +721,7 @@ _multiprocessing_SemLock___exit___impl(SemLockObject *self, } static int -semlock_traverse(SemLockObject *s, visitproc visit, void *arg) +semlock_traverse(PyObject *s, visitproc visit, void *arg) { Py_VISIT(Py_TYPE(s)); return 0; diff --git a/Modules/_operator.c b/Modules/_operator.c index ce3ef015710223..59987b8f143da2 100644 --- a/Modules/_operator.c +++ b/Modules/_operator.c @@ -1868,7 +1868,7 @@ methodcaller_reduce(methodcallerobject *mc, PyObject *Py_UNUSED(ignored)) PyObject *constructor; PyObject *newargs[2]; - partial = _PyImport_GetModuleAttrString("functools", "partial"); + partial = PyImport_ImportModuleAttrString("functools", "partial"); if (!partial) return NULL; diff --git a/Modules/_pickle.c b/Modules/_pickle.c index 599b5f92c2a1f7..5641f93391c551 100644 --- a/Modules/_pickle.c +++ b/Modules/_pickle.c @@ -362,7 +362,7 @@ _Pickle_InitState(PickleState *st) } Py_CLEAR(compat_pickle); - st->codecs_encode = _PyImport_GetModuleAttrString("codecs", "encode"); + st->codecs_encode = PyImport_ImportModuleAttrString("codecs", "encode"); if (st->codecs_encode == NULL) { goto error; } @@ -373,7 +373,7 @@ _Pickle_InitState(PickleState *st) goto error; } - st->partial = _PyImport_GetModuleAttrString("functools", "partial"); + st->partial = PyImport_ImportModuleAttrString("functools", "partial"); if (!st->partial) goto error; @@ -2159,8 +2159,10 @@ save_long(PicklerObject *self, PyObject *obj) unsigned char *pdata; char header[5]; int i; - int sign = _PyLong_Sign(obj); + int sign; + assert(PyLong_Check(obj)); + (void)PyLong_GetSign(obj, &sign); if (sign == 0) { header[0] = LONG1; header[1] = 0; /* It's 0 -- an empty bytestring. 
*/ diff --git a/Modules/_sqlite/blob.c b/Modules/_sqlite/blob.c index d1a549a971c24a..390375628bfb4f 100644 --- a/Modules/_sqlite/blob.c +++ b/Modules/_sqlite/blob.c @@ -9,6 +9,8 @@ #include "clinic/blob.c.h" #undef clinic_state +#define _pysqlite_Blob_CAST(op) ((pysqlite_Blob *)(op)) + /*[clinic input] module _sqlite3 class _sqlite3.Blob "pysqlite_Blob *" "clinic_state()->BlobType" @@ -29,32 +31,35 @@ close_blob(pysqlite_Blob *self) } static int -blob_traverse(pysqlite_Blob *self, visitproc visit, void *arg) +blob_traverse(PyObject *op, visitproc visit, void *arg) { + pysqlite_Blob *self = _pysqlite_Blob_CAST(op); Py_VISIT(Py_TYPE(self)); Py_VISIT(self->connection); return 0; } static int -blob_clear(pysqlite_Blob *self) +blob_clear(PyObject *op) { + pysqlite_Blob *self = _pysqlite_Blob_CAST(op); Py_CLEAR(self->connection); return 0; } static void -blob_dealloc(pysqlite_Blob *self) +blob_dealloc(PyObject *op) { + pysqlite_Blob *self = _pysqlite_Blob_CAST(op); PyTypeObject *tp = Py_TYPE(self); PyObject_GC_UnTrack(self); close_blob(self); if (self->in_weakreflist != NULL) { - PyObject_ClearWeakRefs((PyObject*)self); + PyObject_ClearWeakRefs(op); } - tp->tp_clear((PyObject *)self); + (void)tp->tp_clear(op); tp->tp_free(self); Py_DECREF(tp); } @@ -373,8 +378,9 @@ blob_exit_impl(pysqlite_Blob *self, PyObject *type, PyObject *val, } static Py_ssize_t -blob_length(pysqlite_Blob *self) +blob_length(PyObject *op) { + pysqlite_Blob *self = _pysqlite_Blob_CAST(op); if (!check_blob(self)) { return -1; } @@ -449,8 +455,9 @@ subscript_slice(pysqlite_Blob *self, PyObject *item) } static PyObject * -blob_subscript(pysqlite_Blob *self, PyObject *item) +blob_subscript(PyObject *op, PyObject *item) { + pysqlite_Blob *self = _pysqlite_Blob_CAST(op); if (!check_blob(self)) { return NULL; } @@ -546,8 +553,9 @@ ass_subscript_slice(pysqlite_Blob *self, PyObject *item, PyObject *value) } static int -blob_ass_subscript(pysqlite_Blob *self, PyObject *item, PyObject *value) +blob_ass_subscript(PyObject *op, PyObject *item, PyObject *value) { + pysqlite_Blob *self = _pysqlite_Blob_CAST(op); if (!check_blob(self)) { return -1; } diff --git a/Modules/_sqlite/clinic/blob.c.h b/Modules/_sqlite/clinic/blob.c.h index b95ba948aaf97f..921e7cbd7ffcab 100644 --- a/Modules/_sqlite/clinic/blob.c.h +++ b/Modules/_sqlite/clinic/blob.c.h @@ -17,9 +17,9 @@ static PyObject * blob_close_impl(pysqlite_Blob *self); static PyObject * -blob_close(pysqlite_Blob *self, PyObject *Py_UNUSED(ignored)) +blob_close(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return blob_close_impl(self); + return blob_close_impl((pysqlite_Blob *)self); } PyDoc_STRVAR(blob_read__doc__, @@ -42,7 +42,7 @@ static PyObject * blob_read_impl(pysqlite_Blob *self, int length); static PyObject * -blob_read(pysqlite_Blob *self, PyObject *const *args, Py_ssize_t nargs) +blob_read(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; int length = -1; @@ -58,7 +58,7 @@ blob_read(pysqlite_Blob *self, PyObject *const *args, Py_ssize_t nargs) goto exit; } skip_optional: - return_value = blob_read_impl(self, length); + return_value = blob_read_impl((pysqlite_Blob *)self, length); exit: return return_value; @@ -80,7 +80,7 @@ static PyObject * blob_write_impl(pysqlite_Blob *self, Py_buffer *data); static PyObject * -blob_write(pysqlite_Blob *self, PyObject *arg) +blob_write(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; Py_buffer data = {NULL, NULL}; @@ -88,7 +88,7 @@ blob_write(pysqlite_Blob *self, PyObject *arg) if 
(PyObject_GetBuffer(arg, &data, PyBUF_SIMPLE) != 0) { goto exit; } - return_value = blob_write_impl(self, &data); + return_value = blob_write_impl((pysqlite_Blob *)self, &data); exit: /* Cleanup for data */ @@ -116,7 +116,7 @@ static PyObject * blob_seek_impl(pysqlite_Blob *self, int offset, int origin); static PyObject * -blob_seek(pysqlite_Blob *self, PyObject *const *args, Py_ssize_t nargs) +blob_seek(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; int offset; @@ -137,7 +137,7 @@ blob_seek(pysqlite_Blob *self, PyObject *const *args, Py_ssize_t nargs) goto exit; } skip_optional: - return_value = blob_seek_impl(self, offset, origin); + return_value = blob_seek_impl((pysqlite_Blob *)self, offset, origin); exit: return return_value; @@ -156,9 +156,9 @@ static PyObject * blob_tell_impl(pysqlite_Blob *self); static PyObject * -blob_tell(pysqlite_Blob *self, PyObject *Py_UNUSED(ignored)) +blob_tell(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return blob_tell_impl(self); + return blob_tell_impl((pysqlite_Blob *)self); } PyDoc_STRVAR(blob_enter__doc__, @@ -174,9 +174,9 @@ static PyObject * blob_enter_impl(pysqlite_Blob *self); static PyObject * -blob_enter(pysqlite_Blob *self, PyObject *Py_UNUSED(ignored)) +blob_enter(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return blob_enter_impl(self); + return blob_enter_impl((pysqlite_Blob *)self); } PyDoc_STRVAR(blob_exit__doc__, @@ -193,7 +193,7 @@ blob_exit_impl(pysqlite_Blob *self, PyObject *type, PyObject *val, PyObject *tb); static PyObject * -blob_exit(pysqlite_Blob *self, PyObject *const *args, Py_ssize_t nargs) +blob_exit(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *type; @@ -206,9 +206,9 @@ blob_exit(pysqlite_Blob *self, PyObject *const *args, Py_ssize_t nargs) type = args[0]; val = args[1]; tb = args[2]; - return_value = blob_exit_impl(self, type, val, tb); + return_value = blob_exit_impl((pysqlite_Blob *)self, type, val, tb); exit: return return_value; } -/*[clinic end generated code: output=31abd55660e0c5af input=a9049054013a1b77]*/ +/*[clinic end generated code: output=f03f4ba622b67ae0 input=a9049054013a1b77]*/ diff --git a/Modules/_sqlite/clinic/connection.c.h b/Modules/_sqlite/clinic/connection.c.h index 42eb6eb2f12554..82fba44eb1b074 100644 --- a/Modules/_sqlite/clinic/connection.c.h +++ b/Modules/_sqlite/clinic/connection.c.h @@ -182,7 +182,7 @@ static PyObject * pysqlite_connection_cursor_impl(pysqlite_Connection *self, PyObject *factory); static PyObject * -pysqlite_connection_cursor(pysqlite_Connection *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +pysqlite_connection_cursor(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -224,7 +224,7 @@ pysqlite_connection_cursor(pysqlite_Connection *self, PyObject *const *args, Py_ } factory = args[0]; skip_optional_pos: - return_value = pysqlite_connection_cursor_impl(self, factory); + return_value = pysqlite_connection_cursor_impl((pysqlite_Connection *)self, factory); exit: return return_value; @@ -255,7 +255,7 @@ blobopen_impl(pysqlite_Connection *self, const char *table, const char *col, sqlite3_int64 row, int readonly, const char *name); static PyObject * -blobopen(pysqlite_Connection *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +blobopen(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { 
PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -351,7 +351,7 @@ blobopen(pysqlite_Connection *self, PyObject *const *args, Py_ssize_t nargs, PyO goto exit; } skip_optional_kwonly: - return_value = blobopen_impl(self, table, col, row, readonly, name); + return_value = blobopen_impl((pysqlite_Connection *)self, table, col, row, readonly, name); exit: return return_value; @@ -372,9 +372,9 @@ static PyObject * pysqlite_connection_close_impl(pysqlite_Connection *self); static PyObject * -pysqlite_connection_close(pysqlite_Connection *self, PyObject *Py_UNUSED(ignored)) +pysqlite_connection_close(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return pysqlite_connection_close_impl(self); + return pysqlite_connection_close_impl((pysqlite_Connection *)self); } PyDoc_STRVAR(pysqlite_connection_commit__doc__, @@ -392,9 +392,9 @@ static PyObject * pysqlite_connection_commit_impl(pysqlite_Connection *self); static PyObject * -pysqlite_connection_commit(pysqlite_Connection *self, PyObject *Py_UNUSED(ignored)) +pysqlite_connection_commit(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return pysqlite_connection_commit_impl(self); + return pysqlite_connection_commit_impl((pysqlite_Connection *)self); } PyDoc_STRVAR(pysqlite_connection_rollback__doc__, @@ -412,9 +412,9 @@ static PyObject * pysqlite_connection_rollback_impl(pysqlite_Connection *self); static PyObject * -pysqlite_connection_rollback(pysqlite_Connection *self, PyObject *Py_UNUSED(ignored)) +pysqlite_connection_rollback(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return pysqlite_connection_rollback_impl(self); + return pysqlite_connection_rollback_impl((pysqlite_Connection *)self); } PyDoc_STRVAR(pysqlite_connection_create_function__doc__, @@ -449,7 +449,7 @@ pysqlite_connection_create_function_impl(pysqlite_Connection *self, #endif static PyObject * -pysqlite_connection_create_function(pysqlite_Connection *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +pysqlite_connection_create_function(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -525,7 +525,7 @@ pysqlite_connection_create_function(pysqlite_Connection *self, PyTypeObject *cls goto exit; } skip_optional_kwonly: - return_value = pysqlite_connection_create_function_impl(self, cls, name, narg, func, deterministic); + return_value = pysqlite_connection_create_function_impl((pysqlite_Connection *)self, cls, name, narg, func, deterministic); exit: return return_value; @@ -557,7 +557,7 @@ create_window_function_impl(pysqlite_Connection *self, PyTypeObject *cls, PyObject *aggregate_class); static PyObject * -create_window_function(pysqlite_Connection *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +create_window_function(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -601,7 +601,7 @@ create_window_function(pysqlite_Connection *self, PyTypeObject *cls, PyObject *c goto exit; } aggregate_class = args[2]; - return_value = create_window_function_impl(self, cls, name, num_params, aggregate_class); + return_value = create_window_function_impl((pysqlite_Connection *)self, cls, name, num_params, aggregate_class); exit: return return_value; @@ -642,7 +642,7 @@ 
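/*
 * [Editor's sketch -- not part of the patch.]  The hunks above repeat one
 * pattern across Modules/_sqlite and the regenerated Argument Clinic files:
 * slot functions and generated method wrappers now take a plain `PyObject *`
 * and downcast through a file-local CAST macro (or an explicit cast at the
 * `*_impl` call), so slot and method tables need no function-pointer casts.
 * A minimal, self-contained illustration follows; the type and names
 * (MyObject, MyObject_CAST, my_close) are hypothetical.
 */
#include <Python.h>

typedef struct {
    PyObject_HEAD
    PyObject *payload;
    int closed;
} MyObject;

#define MyObject_CAST(op)  ((MyObject *)(op))

static int
my_traverse(PyObject *op, visitproc visit, void *arg)
{
    MyObject *self = MyObject_CAST(op);
    Py_VISIT(Py_TYPE(self));          /* heap types visit their own type */
    Py_VISIT(self->payload);
    return 0;
}

static int
my_clear(PyObject *op)
{
    MyObject *self = MyObject_CAST(op);
    Py_CLEAR(self->payload);
    return 0;
}

static void
my_dealloc(PyObject *op)
{
    PyTypeObject *tp = Py_TYPE(op);
    PyObject_GC_UnTrack(op);
    (void)tp->tp_clear(op);           /* discard the return value explicitly */
    tp->tp_free(op);
    Py_DECREF(tp);                    /* instances of heap types own a ref to tp */
}

/* The typed implementation keeps its natural signature... */
static PyObject *
my_close_impl(MyObject *self)
{
    self->closed = 1;
    Py_RETURN_NONE;
}

/* ...and the wrapper exposed in tp_methods casts, as the regenerated
   clinic wrappers above do. */
static PyObject *
my_close(PyObject *self, PyObject *Py_UNUSED(ignored))
{
    return my_close_impl(MyObject_CAST(self));
}

static PyMethodDef my_methods[] = {
    {"close", my_close, METH_NOARGS, "close the object"},
    {NULL, NULL, 0, NULL},
};

/* With PyObject *-based signatures, no casts are needed in the tables: */
static PyType_Slot my_slots[] = {
    {Py_tp_traverse, my_traverse},
    {Py_tp_clear, my_clear},
    {Py_tp_dealloc, my_dealloc},
    {Py_tp_methods, my_methods},
    {0, NULL},
};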
pysqlite_connection_create_aggregate_impl(pysqlite_Connection *self, #endif static PyObject * -pysqlite_connection_create_aggregate(pysqlite_Connection *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +pysqlite_connection_create_aggregate(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -708,7 +708,7 @@ pysqlite_connection_create_aggregate(pysqlite_Connection *self, PyTypeObject *cl goto exit; } aggregate_class = args[2]; - return_value = pysqlite_connection_create_aggregate_impl(self, cls, name, n_arg, aggregate_class); + return_value = pysqlite_connection_create_aggregate_impl((pysqlite_Connection *)self, cls, name, n_arg, aggregate_class); exit: return return_value; @@ -745,7 +745,7 @@ pysqlite_connection_set_authorizer_impl(pysqlite_Connection *self, #endif static PyObject * -pysqlite_connection_set_authorizer(pysqlite_Connection *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +pysqlite_connection_set_authorizer(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -792,7 +792,7 @@ pysqlite_connection_set_authorizer(pysqlite_Connection *self, PyTypeObject *cls, } } callable = args[0]; - return_value = pysqlite_connection_set_authorizer_impl(self, cls, callable); + return_value = pysqlite_connection_set_authorizer_impl((pysqlite_Connection *)self, cls, callable); exit: return return_value; @@ -839,7 +839,7 @@ pysqlite_connection_set_progress_handler_impl(pysqlite_Connection *self, #endif static PyObject * -pysqlite_connection_set_progress_handler(pysqlite_Connection *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +pysqlite_connection_set_progress_handler(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -891,7 +891,7 @@ pysqlite_connection_set_progress_handler(pysqlite_Connection *self, PyTypeObject if (n == -1 && PyErr_Occurred()) { goto exit; } - return_value = pysqlite_connection_set_progress_handler_impl(self, cls, callable, n); + return_value = pysqlite_connection_set_progress_handler_impl((pysqlite_Connection *)self, cls, callable, n); exit: return return_value; @@ -928,7 +928,7 @@ pysqlite_connection_set_trace_callback_impl(pysqlite_Connection *self, #endif static PyObject * -pysqlite_connection_set_trace_callback(pysqlite_Connection *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +pysqlite_connection_set_trace_callback(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -975,7 +975,7 @@ pysqlite_connection_set_trace_callback(pysqlite_Connection *self, PyTypeObject * } } callable = args[0]; - return_value = pysqlite_connection_set_trace_callback_impl(self, cls, callable); + return_value = pysqlite_connection_set_trace_callback_impl((pysqlite_Connection *)self, cls, callable); exit: return return_value; @@ -997,7 +997,7 @@ pysqlite_connection_enable_load_extension_impl(pysqlite_Connection *self, int onoff); static PyObject * 
-pysqlite_connection_enable_load_extension(pysqlite_Connection *self, PyObject *arg) +pysqlite_connection_enable_load_extension(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; int onoff; @@ -1006,7 +1006,7 @@ pysqlite_connection_enable_load_extension(pysqlite_Connection *self, PyObject *a if (onoff < 0) { goto exit; } - return_value = pysqlite_connection_enable_load_extension_impl(self, onoff); + return_value = pysqlite_connection_enable_load_extension_impl((pysqlite_Connection *)self, onoff); exit: return return_value; @@ -1031,7 +1031,7 @@ pysqlite_connection_load_extension_impl(pysqlite_Connection *self, const char *entrypoint); static PyObject * -pysqlite_connection_load_extension(pysqlite_Connection *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +pysqlite_connection_load_extension(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -1104,7 +1104,7 @@ pysqlite_connection_load_extension(pysqlite_Connection *self, PyObject *const *a goto exit; } skip_optional_kwonly: - return_value = pysqlite_connection_load_extension_impl(self, extension_name, entrypoint); + return_value = pysqlite_connection_load_extension_impl((pysqlite_Connection *)self, extension_name, entrypoint); exit: return return_value; @@ -1126,7 +1126,7 @@ pysqlite_connection_execute_impl(pysqlite_Connection *self, PyObject *sql, PyObject *parameters); static PyObject * -pysqlite_connection_execute(pysqlite_Connection *self, PyObject *const *args, Py_ssize_t nargs) +pysqlite_connection_execute(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *sql; @@ -1145,7 +1145,7 @@ pysqlite_connection_execute(pysqlite_Connection *self, PyObject *const *args, Py } parameters = args[1]; skip_optional: - return_value = pysqlite_connection_execute_impl(self, sql, parameters); + return_value = pysqlite_connection_execute_impl((pysqlite_Connection *)self, sql, parameters); exit: return return_value; @@ -1165,7 +1165,7 @@ pysqlite_connection_executemany_impl(pysqlite_Connection *self, PyObject *sql, PyObject *parameters); static PyObject * -pysqlite_connection_executemany(pysqlite_Connection *self, PyObject *const *args, Py_ssize_t nargs) +pysqlite_connection_executemany(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *sql; @@ -1180,7 +1180,7 @@ pysqlite_connection_executemany(pysqlite_Connection *self, PyObject *const *args } sql = args[0]; parameters = args[1]; - return_value = pysqlite_connection_executemany_impl(self, sql, parameters); + return_value = pysqlite_connection_executemany_impl((pysqlite_Connection *)self, sql, parameters); exit: return return_value; @@ -1208,9 +1208,9 @@ static PyObject * pysqlite_connection_interrupt_impl(pysqlite_Connection *self); static PyObject * -pysqlite_connection_interrupt(pysqlite_Connection *self, PyObject *Py_UNUSED(ignored)) +pysqlite_connection_interrupt(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return pysqlite_connection_interrupt_impl(self); + return pysqlite_connection_interrupt_impl((pysqlite_Connection *)self); } PyDoc_STRVAR(pysqlite_connection_iterdump__doc__, @@ -1230,7 +1230,7 @@ pysqlite_connection_iterdump_impl(pysqlite_Connection *self, PyObject *filter); static PyObject * -pysqlite_connection_iterdump(pysqlite_Connection *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +pysqlite_connection_iterdump(PyObject 
*self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -1272,7 +1272,7 @@ pysqlite_connection_iterdump(pysqlite_Connection *self, PyObject *const *args, P } filter = args[0]; skip_optional_kwonly: - return_value = pysqlite_connection_iterdump_impl(self, filter); + return_value = pysqlite_connection_iterdump_impl((pysqlite_Connection *)self, filter); exit: return return_value; @@ -1295,7 +1295,7 @@ pysqlite_connection_backup_impl(pysqlite_Connection *self, double sleep); static PyObject * -pysqlite_connection_backup(pysqlite_Connection *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +pysqlite_connection_backup(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -1388,7 +1388,7 @@ pysqlite_connection_backup(pysqlite_Connection *self, PyObject *const *args, Py_ } } skip_optional_kwonly: - return_value = pysqlite_connection_backup_impl(self, target, pages, progress, name, sleep); + return_value = pysqlite_connection_backup_impl((pysqlite_Connection *)self, target, pages, progress, name, sleep); exit: return return_value; @@ -1410,7 +1410,7 @@ pysqlite_connection_create_collation_impl(pysqlite_Connection *self, PyObject *callable); static PyObject * -pysqlite_connection_create_collation(pysqlite_Connection *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +pysqlite_connection_create_collation(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -1449,7 +1449,7 @@ pysqlite_connection_create_collation(pysqlite_Connection *self, PyTypeObject *cl goto exit; } callable = args[1]; - return_value = pysqlite_connection_create_collation_impl(self, cls, name, callable); + return_value = pysqlite_connection_create_collation_impl((pysqlite_Connection *)self, cls, name, callable); exit: return return_value; @@ -1478,7 +1478,7 @@ static PyObject * serialize_impl(pysqlite_Connection *self, const char *name); static PyObject * -serialize(pysqlite_Connection *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +serialize(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -1532,7 +1532,7 @@ serialize(pysqlite_Connection *self, PyObject *const *args, Py_ssize_t nargs, Py goto exit; } skip_optional_kwonly: - return_value = serialize_impl(self, name); + return_value = serialize_impl((pysqlite_Connection *)self, name); exit: return return_value; @@ -1568,7 +1568,7 @@ deserialize_impl(pysqlite_Connection *self, Py_buffer *data, const char *name); static PyObject * -deserialize(pysqlite_Connection *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +deserialize(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -1638,7 +1638,7 @@ deserialize(pysqlite_Connection *self, PyObject *const *args, Py_ssize_t nargs, goto exit; } skip_optional_kwonly: - return_value = deserialize_impl(self, &data, name); + return_value = deserialize_impl((pysqlite_Connection *)self, &data, name); exit: /* Cleanup for data */ @@ -1666,9 +1666,9 @@ static 
PyObject * pysqlite_connection_enter_impl(pysqlite_Connection *self); static PyObject * -pysqlite_connection_enter(pysqlite_Connection *self, PyObject *Py_UNUSED(ignored)) +pysqlite_connection_enter(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return pysqlite_connection_enter_impl(self); + return pysqlite_connection_enter_impl((pysqlite_Connection *)self); } PyDoc_STRVAR(pysqlite_connection_exit__doc__, @@ -1687,7 +1687,7 @@ pysqlite_connection_exit_impl(pysqlite_Connection *self, PyObject *exc_type, PyObject *exc_value, PyObject *exc_tb); static PyObject * -pysqlite_connection_exit(pysqlite_Connection *self, PyObject *const *args, Py_ssize_t nargs) +pysqlite_connection_exit(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *exc_type; @@ -1700,7 +1700,7 @@ pysqlite_connection_exit(pysqlite_Connection *self, PyObject *const *args, Py_ss exc_type = args[0]; exc_value = args[1]; exc_tb = args[2]; - return_value = pysqlite_connection_exit_impl(self, exc_type, exc_value, exc_tb); + return_value = pysqlite_connection_exit_impl((pysqlite_Connection *)self, exc_type, exc_value, exc_tb); exit: return return_value; @@ -1729,7 +1729,7 @@ static PyObject * setlimit_impl(pysqlite_Connection *self, int category, int limit); static PyObject * -setlimit(pysqlite_Connection *self, PyObject *const *args, Py_ssize_t nargs) +setlimit(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; int category; @@ -1746,7 +1746,7 @@ setlimit(pysqlite_Connection *self, PyObject *const *args, Py_ssize_t nargs) if (limit == -1 && PyErr_Occurred()) { goto exit; } - return_value = setlimit_impl(self, category, limit); + return_value = setlimit_impl((pysqlite_Connection *)self, category, limit); exit: return return_value; @@ -1768,7 +1768,7 @@ static PyObject * getlimit_impl(pysqlite_Connection *self, int category); static PyObject * -getlimit(pysqlite_Connection *self, PyObject *arg) +getlimit(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; int category; @@ -1777,7 +1777,7 @@ getlimit(pysqlite_Connection *self, PyObject *arg) if (category == -1 && PyErr_Occurred()) { goto exit; } - return_value = getlimit_impl(self, category); + return_value = getlimit_impl((pysqlite_Connection *)self, category); exit: return return_value; @@ -1799,7 +1799,7 @@ static PyObject * setconfig_impl(pysqlite_Connection *self, int op, int enable); static PyObject * -setconfig(pysqlite_Connection *self, PyObject *const *args, Py_ssize_t nargs) +setconfig(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; int op; @@ -1820,7 +1820,7 @@ setconfig(pysqlite_Connection *self, PyObject *const *args, Py_ssize_t nargs) goto exit; } skip_optional: - return_value = setconfig_impl(self, op, enable); + return_value = setconfig_impl((pysqlite_Connection *)self, op, enable); exit: return return_value; @@ -1842,7 +1842,7 @@ static int getconfig_impl(pysqlite_Connection *self, int op); static PyObject * -getconfig(pysqlite_Connection *self, PyObject *arg) +getconfig(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; int op; @@ -1852,7 +1852,7 @@ getconfig(pysqlite_Connection *self, PyObject *arg) if (op == -1 && PyErr_Occurred()) { goto exit; } - _return_value = getconfig_impl(self, op); + _return_value = getconfig_impl((pysqlite_Connection *)self, op); if ((_return_value == -1) && PyErr_Occurred()) { goto exit; } @@ -1881,4 +1881,4 @@ getconfig(pysqlite_Connection *self, PyObject *arg) #ifndef 
DESERIALIZE_METHODDEF #define DESERIALIZE_METHODDEF #endif /* !defined(DESERIALIZE_METHODDEF) */ -/*[clinic end generated code: output=a8fd19301c7390cc input=a9049054013a1b77]*/ +/*[clinic end generated code: output=c59effb407b8ea4d input=a9049054013a1b77]*/ diff --git a/Modules/_sqlite/clinic/cursor.c.h b/Modules/_sqlite/clinic/cursor.c.h index ca7823cf5aef5b..590e429e9139f1 100644 --- a/Modules/_sqlite/clinic/cursor.c.h +++ b/Modules/_sqlite/clinic/cursor.c.h @@ -52,7 +52,7 @@ pysqlite_cursor_execute_impl(pysqlite_Cursor *self, PyObject *sql, PyObject *parameters); static PyObject * -pysqlite_cursor_execute(pysqlite_Cursor *self, PyObject *const *args, Py_ssize_t nargs) +pysqlite_cursor_execute(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *sql; @@ -71,7 +71,7 @@ pysqlite_cursor_execute(pysqlite_Cursor *self, PyObject *const *args, Py_ssize_t } parameters = args[1]; skip_optional: - return_value = pysqlite_cursor_execute_impl(self, sql, parameters); + return_value = pysqlite_cursor_execute_impl((pysqlite_Cursor *)self, sql, parameters); exit: return return_value; @@ -91,7 +91,7 @@ pysqlite_cursor_executemany_impl(pysqlite_Cursor *self, PyObject *sql, PyObject *seq_of_parameters); static PyObject * -pysqlite_cursor_executemany(pysqlite_Cursor *self, PyObject *const *args, Py_ssize_t nargs) +pysqlite_cursor_executemany(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *sql; @@ -106,7 +106,7 @@ pysqlite_cursor_executemany(pysqlite_Cursor *self, PyObject *const *args, Py_ssi } sql = args[0]; seq_of_parameters = args[1]; - return_value = pysqlite_cursor_executemany_impl(self, sql, seq_of_parameters); + return_value = pysqlite_cursor_executemany_impl((pysqlite_Cursor *)self, sql, seq_of_parameters); exit: return return_value; @@ -126,7 +126,7 @@ pysqlite_cursor_executescript_impl(pysqlite_Cursor *self, const char *sql_script); static PyObject * -pysqlite_cursor_executescript(pysqlite_Cursor *self, PyObject *arg) +pysqlite_cursor_executescript(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; const char *sql_script; @@ -144,7 +144,7 @@ pysqlite_cursor_executescript(pysqlite_Cursor *self, PyObject *arg) PyErr_SetString(PyExc_ValueError, "embedded null character"); goto exit; } - return_value = pysqlite_cursor_executescript_impl(self, sql_script); + return_value = pysqlite_cursor_executescript_impl((pysqlite_Cursor *)self, sql_script); exit: return return_value; @@ -163,9 +163,9 @@ static PyObject * pysqlite_cursor_fetchone_impl(pysqlite_Cursor *self); static PyObject * -pysqlite_cursor_fetchone(pysqlite_Cursor *self, PyObject *Py_UNUSED(ignored)) +pysqlite_cursor_fetchone(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return pysqlite_cursor_fetchone_impl(self); + return pysqlite_cursor_fetchone_impl((pysqlite_Cursor *)self); } PyDoc_STRVAR(pysqlite_cursor_fetchmany__doc__, @@ -184,7 +184,7 @@ static PyObject * pysqlite_cursor_fetchmany_impl(pysqlite_Cursor *self, int maxrows); static PyObject * -pysqlite_cursor_fetchmany(pysqlite_Cursor *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +pysqlite_cursor_fetchmany(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -214,7 +214,7 @@ pysqlite_cursor_fetchmany(pysqlite_Cursor *self, PyObject *const *args, Py_ssize #undef KWTUPLE PyObject *argsbuf[1]; Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 0; - int maxrows = self->arraysize; + int maxrows = ((pysqlite_Cursor *)self)->arraysize; args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, /*minpos*/ 0, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf); @@ -229,7 +229,7 @@ pysqlite_cursor_fetchmany(pysqlite_Cursor *self, PyObject *const *args, Py_ssize goto exit; } skip_optional_pos: - return_value = pysqlite_cursor_fetchmany_impl(self, maxrows); + return_value = pysqlite_cursor_fetchmany_impl((pysqlite_Cursor *)self, maxrows); exit: return return_value; @@ -248,9 +248,9 @@ static PyObject * pysqlite_cursor_fetchall_impl(pysqlite_Cursor *self); static PyObject * -pysqlite_cursor_fetchall(pysqlite_Cursor *self, PyObject *Py_UNUSED(ignored)) +pysqlite_cursor_fetchall(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return pysqlite_cursor_fetchall_impl(self); + return pysqlite_cursor_fetchall_impl((pysqlite_Cursor *)self); } PyDoc_STRVAR(pysqlite_cursor_setinputsizes__doc__, @@ -276,7 +276,7 @@ pysqlite_cursor_setoutputsize_impl(pysqlite_Cursor *self, PyObject *size, PyObject *column); static PyObject * -pysqlite_cursor_setoutputsize(pysqlite_Cursor *self, PyObject *const *args, Py_ssize_t nargs) +pysqlite_cursor_setoutputsize(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *size; @@ -291,7 +291,7 @@ pysqlite_cursor_setoutputsize(pysqlite_Cursor *self, PyObject *const *args, Py_s } column = args[1]; skip_optional: - return_value = pysqlite_cursor_setoutputsize_impl(self, size, column); + return_value = pysqlite_cursor_setoutputsize_impl((pysqlite_Cursor *)self, size, column); exit: return return_value; @@ -310,8 +310,8 @@ static PyObject * pysqlite_cursor_close_impl(pysqlite_Cursor *self); static PyObject * -pysqlite_cursor_close(pysqlite_Cursor *self, PyObject *Py_UNUSED(ignored)) +pysqlite_cursor_close(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return pysqlite_cursor_close_impl(self); + return pysqlite_cursor_close_impl((pysqlite_Cursor *)self); } -/*[clinic end generated code: output=f0804afc5f8646c1 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=82620ca7622b547c input=a9049054013a1b77]*/ diff --git a/Modules/_sqlite/clinic/row.c.h b/Modules/_sqlite/clinic/row.c.h index e8d1dbf2ba8bc9..068906744e445f 100644 --- a/Modules/_sqlite/clinic/row.c.h +++ b/Modules/_sqlite/clinic/row.c.h @@ -52,8 +52,8 @@ static PyObject * pysqlite_row_keys_impl(pysqlite_Row *self); static PyObject * -pysqlite_row_keys(pysqlite_Row *self, PyObject *Py_UNUSED(ignored)) +pysqlite_row_keys(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return pysqlite_row_keys_impl(self); + return pysqlite_row_keys_impl((pysqlite_Row *)self); } -/*[clinic end generated code: output=788bf817acc02b8e input=a9049054013a1b77]*/ +/*[clinic end generated code: output=6c1acbb48f386468 input=a9049054013a1b77]*/ diff --git a/Modules/_sqlite/connection.c b/Modules/_sqlite/connection.c index fc03e4a085c179..16afd7eada113f 100644 --- a/Modules/_sqlite/connection.c +++ b/Modules/_sqlite/connection.c @@ -34,7 +34,6 @@ #include "prepare_protocol.h" #include "util.h" -#include "pycore_import.h" // _PyImport_GetModuleAttrString() #include "pycore_modsupport.h" // _PyArg_NoKeywords() #include "pycore_pyerrors.h" // _PyErr_ChainExceptions1() #include "pycore_pylifecycle.h" // _Py_IsInterpreterFinalizing() @@ -136,6 +135,8 @@ sqlite3_int64_converter(PyObject *obj, sqlite3_int64 *result) #include "clinic/connection.c.h" #undef clinic_state +#define 
_pysqlite_Connection_CAST(op) ((pysqlite_Connection *)(op)) + /*[clinic input] module _sqlite3 class _sqlite3.Connection "pysqlite_Connection *" "clinic_state()->ConnectionType" @@ -385,8 +386,9 @@ do { \ } while (0) static int -connection_traverse(pysqlite_Connection *self, visitproc visit, void *arg) +connection_traverse(PyObject *op, visitproc visit, void *arg) { + pysqlite_Connection *self = _pysqlite_Connection_CAST(op); Py_VISIT(Py_TYPE(self)); Py_VISIT(self->statement_cache); Py_VISIT(self->cursors); @@ -410,8 +412,9 @@ clear_callback_context(callback_context *ctx) } static int -connection_clear(pysqlite_Connection *self) +connection_clear(PyObject *op) { + pysqlite_Connection *self = _pysqlite_Connection_CAST(op); Py_CLEAR(self->statement_cache); Py_CLEAR(self->cursors); Py_CLEAR(self->blobs); @@ -494,7 +497,8 @@ connection_finalize(PyObject *self) if (PyErr_ResourceWarning(self, 1, "unclosed database in %R", self)) { /* Spurious errors can appear at shutdown */ if (PyErr_ExceptionMatches(PyExc_Warning)) { - PyErr_WriteUnraisable(self); + PyErr_FormatUnraisable("Exception ignored while finalizing " + "database connection %R", self); } } } @@ -503,7 +507,8 @@ connection_finalize(PyObject *self) PyErr_Clear(); } else { - PyErr_WriteUnraisable((PyObject *)self); + PyErr_FormatUnraisable("Exception ignored while closing database %R", + self); } } @@ -518,7 +523,7 @@ connection_dealloc(PyObject *self) } PyTypeObject *tp = Py_TYPE(self); PyObject_GC_UnTrack(self); - tp->tp_clear(self); + (void)tp->tp_clear(self); tp->tp_free(self); Py_DECREF(tp); } @@ -890,7 +895,8 @@ print_or_clear_traceback(callback_context *ctx) assert(ctx != NULL); assert(ctx->state != NULL); if (ctx->state->enable_callback_tracebacks) { - PyErr_WriteUnraisable(ctx->callable); + PyErr_FormatUnraisable("Exception ignored on sqlite3 callback %R", + ctx->callable); } else { PyErr_Clear(); @@ -958,6 +964,11 @@ step_callback(sqlite3_context *context, int argc, sqlite3_value **params) assert(ctx != NULL); aggregate_instance = (PyObject**)sqlite3_aggregate_context(context, sizeof(PyObject*)); + if (aggregate_instance == NULL) { + (void)PyErr_NoMemory(); + set_sqlite_error(context, "unable to allocate SQLite aggregate context"); + goto error; + } if (*aggregate_instance == NULL) { *aggregate_instance = PyObject_CallNoArgs(ctx->callable); if (!*aggregate_instance) { @@ -1711,8 +1722,10 @@ int pysqlite_check_thread(pysqlite_Connection* self) return 1; } -static PyObject* pysqlite_connection_get_isolation_level(pysqlite_Connection* self, void* unused) +static PyObject * +pysqlite_connection_get_isolation_level(PyObject *op, void *Py_UNUSED(closure)) { + pysqlite_Connection *self = _pysqlite_Connection_CAST(op); if (!pysqlite_check_connection(self)) { return NULL; } @@ -1722,16 +1735,20 @@ static PyObject* pysqlite_connection_get_isolation_level(pysqlite_Connection* se Py_RETURN_NONE; } -static PyObject* pysqlite_connection_get_total_changes(pysqlite_Connection* self, void* unused) +static PyObject * +pysqlite_connection_get_total_changes(PyObject *op, void *Py_UNUSED(closure)) { + pysqlite_Connection *self = _pysqlite_Connection_CAST(op); if (!pysqlite_check_connection(self)) { return NULL; } return PyLong_FromLong(sqlite3_total_changes(self->db)); } -static PyObject* pysqlite_connection_get_in_transaction(pysqlite_Connection* self, void* unused) +static PyObject * +pysqlite_connection_get_in_transaction(PyObject *op, void *Py_UNUSED(closure)) { + pysqlite_Connection *self = _pysqlite_Connection_CAST(op); if 
(!pysqlite_check_connection(self)) { return NULL; } @@ -1742,8 +1759,11 @@ static PyObject* pysqlite_connection_get_in_transaction(pysqlite_Connection* sel } static int -pysqlite_connection_set_isolation_level(pysqlite_Connection* self, PyObject* isolation_level, void *Py_UNUSED(ignored)) +pysqlite_connection_set_isolation_level(PyObject *op, + PyObject *isolation_level, + void *Py_UNUSED(ignored)) { + pysqlite_Connection *self = _pysqlite_Connection_CAST(op); if (isolation_level == NULL) { PyErr_SetString(PyExc_AttributeError, "cannot delete attribute"); return -1; @@ -1766,11 +1786,11 @@ pysqlite_connection_set_isolation_level(pysqlite_Connection* self, PyObject* iso } static PyObject * -pysqlite_connection_call(pysqlite_Connection *self, PyObject *args, - PyObject *kwargs) +pysqlite_connection_call(PyObject *op, PyObject *args, PyObject *kwargs) { PyObject* sql; pysqlite_Statement* statement; + pysqlite_Connection *self = _pysqlite_Connection_CAST(op); if (!pysqlite_check_thread(self) || !pysqlite_check_connection(self)) { return NULL; @@ -1995,7 +2015,7 @@ pysqlite_connection_iterdump_impl(pysqlite_Connection *self, return NULL; } - PyObject *iterdump = _PyImport_GetModuleAttrString(MODULE_NAME ".dump", "_iterdump"); + PyObject *iterdump = PyImport_ImportModuleAttrString(MODULE_NAME ".dump", "_iterdump"); if (!iterdump) { if (!PyErr_Occurred()) { PyErr_SetString(self->OperationalError, @@ -2521,8 +2541,9 @@ getconfig_impl(pysqlite_Connection *self, int op) } static PyObject * -get_autocommit(pysqlite_Connection *self, void *Py_UNUSED(ctx)) +get_autocommit(PyObject *op, void *Py_UNUSED(closure)) { + pysqlite_Connection *self = _pysqlite_Connection_CAST(op); if (!pysqlite_check_thread(self) || !pysqlite_check_connection(self)) { return NULL; } @@ -2536,8 +2557,9 @@ get_autocommit(pysqlite_Connection *self, void *Py_UNUSED(ctx)) } static int -set_autocommit(pysqlite_Connection *self, PyObject *val, void *Py_UNUSED(ctx)) +set_autocommit(PyObject *op, PyObject *val, void *Py_UNUSED(closure)) { + pysqlite_Connection *self = _pysqlite_Connection_CAST(op); if (!pysqlite_check_thread(self) || !pysqlite_check_connection(self)) { return -1; } @@ -2562,7 +2584,7 @@ set_autocommit(pysqlite_Connection *self, PyObject *val, void *Py_UNUSED(ctx)) } static PyObject * -get_sig(PyObject *self, void *Py_UNUSED(ctx)) +get_sig(PyObject *Py_UNUSED(self), void *Py_UNUSED(closure)) { return PyUnicode_FromString("(sql, /)"); } @@ -2572,11 +2594,12 @@ static const char connection_doc[] = PyDoc_STR("SQLite database connection object."); static PyGetSetDef connection_getset[] = { - {"isolation_level", (getter)pysqlite_connection_get_isolation_level, (setter)pysqlite_connection_set_isolation_level}, - {"total_changes", (getter)pysqlite_connection_get_total_changes, (setter)0}, - {"in_transaction", (getter)pysqlite_connection_get_in_transaction, (setter)0}, - {"autocommit", (getter)get_autocommit, (setter)set_autocommit}, - {"__text_signature__", get_sig, (setter)0}, + {"isolation_level", pysqlite_connection_get_isolation_level, + pysqlite_connection_set_isolation_level}, + {"total_changes", pysqlite_connection_get_total_changes, NULL}, + {"in_transaction", pysqlite_connection_get_in_transaction, NULL}, + {"autocommit", get_autocommit, set_autocommit}, + {"__text_signature__", get_sig, NULL}, {NULL} }; diff --git a/Modules/_sqlite/cursor.c b/Modules/_sqlite/cursor.c index 0fbd408f18cf6a..02d598040775b0 100644 --- a/Modules/_sqlite/cursor.c +++ b/Modules/_sqlite/cursor.c @@ -44,6 +44,8 @@ typedef enum { #include 
"clinic/cursor.c.h" #undef clinic_state +#define _pysqlite_Cursor_CAST(op) ((pysqlite_Cursor *)(op)) + static inline int check_cursor_locked(pysqlite_Cursor *cur) { @@ -146,8 +148,9 @@ stmt_reset(pysqlite_Statement *self) } static int -cursor_traverse(pysqlite_Cursor *self, visitproc visit, void *arg) +cursor_traverse(PyObject *op, visitproc visit, void *arg) { + pysqlite_Cursor *self = _pysqlite_Cursor_CAST(op); Py_VISIT(Py_TYPE(self)); Py_VISIT(self->connection); Py_VISIT(self->description); @@ -159,8 +162,9 @@ cursor_traverse(pysqlite_Cursor *self, visitproc visit, void *arg) } static int -cursor_clear(pysqlite_Cursor *self) +cursor_clear(PyObject *op) { + pysqlite_Cursor *self = _pysqlite_Cursor_CAST(op); Py_CLEAR(self->connection); Py_CLEAR(self->description); Py_CLEAR(self->row_cast_map); @@ -176,14 +180,15 @@ cursor_clear(pysqlite_Cursor *self) } static void -cursor_dealloc(pysqlite_Cursor *self) +cursor_dealloc(PyObject *op) { + pysqlite_Cursor *self = _pysqlite_Cursor_CAST(op); PyTypeObject *tp = Py_TYPE(self); PyObject_GC_UnTrack(self); if (self->in_weakreflist != NULL) { - PyObject_ClearWeakRefs((PyObject*)self); + PyObject_ClearWeakRefs(op); } - tp->tp_clear((PyObject *)self); + (void)tp->tp_clear(op); tp->tp_free(self); Py_DECREF(tp); } @@ -1087,8 +1092,9 @@ pysqlite_cursor_executescript_impl(pysqlite_Cursor *self, } static PyObject * -pysqlite_cursor_iternext(pysqlite_Cursor *self) +pysqlite_cursor_iternext(PyObject *op) { + pysqlite_Cursor *self = _pysqlite_Cursor_CAST(op); if (!check_cursor(self)) { return NULL; } @@ -1125,7 +1131,7 @@ pysqlite_cursor_iternext(pysqlite_Cursor *self) } if (!Py_IsNone(self->row_factory)) { PyObject *factory = self->row_factory; - PyObject *args[] = { (PyObject *)self, row, }; + PyObject *args[] = { op, row, }; PyObject *new_row = PyObject_Vectorcall(factory, args, 2, NULL); Py_SETREF(row, new_row); } @@ -1144,7 +1150,7 @@ pysqlite_cursor_fetchone_impl(pysqlite_Cursor *self) { PyObject* row; - row = pysqlite_cursor_iternext(self); + row = pysqlite_cursor_iternext((PyObject *)self); if (!row && !PyErr_Occurred()) { Py_RETURN_NONE; } @@ -1155,7 +1161,7 @@ pysqlite_cursor_fetchone_impl(pysqlite_Cursor *self) /*[clinic input] _sqlite3.Cursor.fetchmany as pysqlite_cursor_fetchmany - size as maxrows: int(c_default='self->arraysize') = 1 + size as maxrows: int(c_default='((pysqlite_Cursor *)self)->arraysize') = 1 The default value is set by the Cursor.arraysize attribute. Fetches several rows from the resultset. @@ -1163,7 +1169,7 @@ Fetches several rows from the resultset. 
static PyObject * pysqlite_cursor_fetchmany_impl(pysqlite_Cursor *self, int maxrows) -/*[clinic end generated code: output=a8ef31fea64d0906 input=c26e6ca3f34debd0]*/ +/*[clinic end generated code: output=a8ef31fea64d0906 input=035dbe44a1005bf2]*/ { PyObject* row; PyObject* list; @@ -1174,7 +1180,7 @@ pysqlite_cursor_fetchmany_impl(pysqlite_Cursor *self, int maxrows) return NULL; } - while ((row = pysqlite_cursor_iternext(self))) { + while ((row = pysqlite_cursor_iternext((PyObject *)self))) { if (PyList_Append(list, row) < 0) { Py_DECREF(row); break; @@ -1212,7 +1218,7 @@ pysqlite_cursor_fetchall_impl(pysqlite_Cursor *self) return NULL; } - while ((row = pysqlite_cursor_iternext(self))) { + while ((row = pysqlite_cursor_iternext((PyObject *)self))) { if (PyList_Append(list, row) < 0) { Py_DECREF(row); break; diff --git a/Modules/_sqlite/module.c b/Modules/_sqlite/module.c index 698e81d9b897d0..27e8dab92e0e67 100644 --- a/Modules/_sqlite/module.c +++ b/Modules/_sqlite/module.c @@ -33,8 +33,6 @@ #include "row.h" #include "blob.h" -#include "pycore_import.h" // _PyImport_GetModuleAttrString() - #if SQLITE_VERSION_NUMBER < 3015002 #error "SQLite 3.15.2 or higher required" #endif @@ -234,7 +232,7 @@ static int load_functools_lru_cache(PyObject *module) { pysqlite_state *state = pysqlite_get_state(module); - state->lru_cache = _PyImport_GetModuleAttrString("functools", "lru_cache"); + state->lru_cache = PyImport_ImportModuleAttrString("functools", "lru_cache"); if (state->lru_cache == NULL) { return -1; } @@ -619,7 +617,7 @@ module_clear(PyObject *module) static void module_free(void *module) { - module_clear((PyObject *)module); + (void)module_clear((PyObject *)module); } #define ADD_TYPE(module, type) \ diff --git a/Modules/_sqlite/prepare_protocol.c b/Modules/_sqlite/prepare_protocol.c index 44533225665dab..31092417cb480d 100644 --- a/Modules/_sqlite/prepare_protocol.c +++ b/Modules/_sqlite/prepare_protocol.c @@ -24,8 +24,7 @@ #include "prepare_protocol.h" static int -pysqlite_prepare_protocol_init(pysqlite_PrepareProtocol *self, PyObject *args, - PyObject *kwargs) +pysqlite_prepare_protocol_init(PyObject *self, PyObject *args, PyObject *kwargs) { return 0; } @@ -38,7 +37,7 @@ pysqlite_prepare_protocol_traverse(PyObject *self, visitproc visit, void *arg) } static void -pysqlite_prepare_protocol_dealloc(pysqlite_PrepareProtocol *self) +pysqlite_prepare_protocol_dealloc(PyObject *self) { PyTypeObject *tp = Py_TYPE(self); PyObject_GC_UnTrack(self); diff --git a/Modules/_sqlite/row.c b/Modules/_sqlite/row.c index 14555076a7e79a..79660008b180dc 100644 --- a/Modules/_sqlite/row.c +++ b/Modules/_sqlite/row.c @@ -32,6 +32,8 @@ #include "clinic/row.c.h" #undef clinic_state +#define _pysqlite_Row_CAST(op) ((pysqlite_Row *)(op)) + /*[clinic input] module _sqlite3 class _sqlite3.Row "pysqlite_Row *" "clinic_state()->RowType" @@ -39,16 +41,18 @@ class _sqlite3.Row "pysqlite_Row *" "clinic_state()->RowType" /*[clinic end generated code: output=da39a3ee5e6b4b0d input=966c53403d7f3a40]*/ static int -row_clear(pysqlite_Row *self) +row_clear(PyObject *op) { + pysqlite_Row *self = _pysqlite_Row_CAST(op); Py_CLEAR(self->data); Py_CLEAR(self->description); return 0; } static int -row_traverse(pysqlite_Row *self, visitproc visit, void *arg) +row_traverse(PyObject *op, visitproc visit, void *arg) { + pysqlite_Row *self = _pysqlite_Row_CAST(op); Py_VISIT(Py_TYPE(self)); Py_VISIT(self->data); Py_VISIT(self->description); @@ -60,7 +64,7 @@ pysqlite_row_dealloc(PyObject *self) { PyTypeObject *tp = Py_TYPE(self); 
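/*
 * [Editor's sketch -- not part of the patch.]  module.c and connection.c
 * above (and sre.c later in this patch) drop the private
 * _PyImport_GetModuleAttrString() helper in favour of the public
 * PyImport_ImportModuleAttrString(), which imports a module and returns one
 * attribute from it in a single call.  The caller below is illustrative only
 * (load_lru_cache is a hypothetical name) and assumes a CPython new enough
 * to provide the public API.
 */
#include <Python.h>

static PyObject *
load_lru_cache(void)
{
    /* Roughly: `from functools import lru_cache` */
    PyObject *lru_cache = PyImport_ImportModuleAttrString("functools",
                                                          "lru_cache");
    if (lru_cache == NULL) {
        return NULL;      /* exception already set by the import machinery */
    }
    return lru_cache;     /* new reference; caller must Py_DECREF() it */
}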
PyObject_GC_UnTrack(self); - tp->tp_clear(self); + (void)tp->tp_clear(self); tp->tp_free(self); Py_DECREF(tp); } @@ -94,10 +98,12 @@ pysqlite_row_new_impl(PyTypeObject *type, pysqlite_Cursor *cursor, return (PyObject *) self; } -PyObject* pysqlite_row_item(pysqlite_Row* self, Py_ssize_t idx) +static PyObject * +pysqlite_row_item(PyObject *op, Py_ssize_t idx) { - PyObject *item = PyTuple_GetItem(self->data, idx); - return Py_XNewRef(item); + pysqlite_Row *self = _pysqlite_Row_CAST(op); + PyObject *item = PyTuple_GetItem(self->data, idx); + return Py_XNewRef(item); } static int @@ -129,10 +135,11 @@ equal_ignore_case(PyObject *left, PyObject *right) } static PyObject * -pysqlite_row_subscript(pysqlite_Row *self, PyObject *idx) +pysqlite_row_subscript(PyObject *op, PyObject *idx) { Py_ssize_t _idx; Py_ssize_t nitems, i; + pysqlite_Row *self = _pysqlite_Row_CAST(op); if (PyLong_Check(idx)) { _idx = PyNumber_AsSsize_t(idx, PyExc_IndexError); @@ -174,8 +181,9 @@ pysqlite_row_subscript(pysqlite_Row *self, PyObject *idx) } static Py_ssize_t -pysqlite_row_length(pysqlite_Row* self) +pysqlite_row_length(PyObject *op) { + pysqlite_Row *self = _pysqlite_Row_CAST(op); return PyTuple_GET_SIZE(self->data); } @@ -208,24 +216,30 @@ pysqlite_row_keys_impl(pysqlite_Row *self) return list; } -static PyObject* pysqlite_iter(pysqlite_Row* self) +static PyObject * +pysqlite_iter(PyObject *op) { + pysqlite_Row *self = _pysqlite_Row_CAST(op); return PyObject_GetIter(self->data); } -static Py_hash_t pysqlite_row_hash(pysqlite_Row *self) +static Py_hash_t +pysqlite_row_hash(PyObject *op) { + pysqlite_Row *self = _pysqlite_Row_CAST(op); return PyObject_Hash(self->description) ^ PyObject_Hash(self->data); } -static PyObject* pysqlite_row_richcompare(pysqlite_Row *self, PyObject *_other, int opid) +static PyObject * +pysqlite_row_richcompare(PyObject *op, PyObject *opother, int opid) { if (opid != Py_EQ && opid != Py_NE) Py_RETURN_NOTIMPLEMENTED; + pysqlite_Row *self = _pysqlite_Row_CAST(op); pysqlite_state *state = pysqlite_get_state_by_type(Py_TYPE(self)); - if (PyObject_TypeCheck(_other, state->RowType)) { - pysqlite_Row *other = (pysqlite_Row *)_other; + if (PyObject_TypeCheck(opother, state->RowType)) { + pysqlite_Row *other = (pysqlite_Row *)opother; int eq = PyObject_RichCompareBool(self->description, other->description, Py_EQ); if (eq < 0) { return NULL; diff --git a/Modules/_sqlite/statement.c b/Modules/_sqlite/statement.c index 229bfc3b504165..facced0dfbfafd 100644 --- a/Modules/_sqlite/statement.c +++ b/Modules/_sqlite/statement.c @@ -25,6 +25,8 @@ #include "statement.h" #include "util.h" +#define _pysqlite_Statement_CAST(op) ((pysqlite_Statement *)(op)) + /* prototypes */ static const char *lstrip_sql(const char *sql); @@ -99,10 +101,11 @@ pysqlite_statement_create(pysqlite_Connection *connection, PyObject *sql) } static void -stmt_dealloc(pysqlite_Statement *self) +stmt_dealloc(PyObject *op) { + pysqlite_Statement *self = _pysqlite_Statement_CAST(op); PyTypeObject *tp = Py_TYPE(self); - PyObject_GC_UnTrack(self); + PyObject_GC_UnTrack(op); if (self->st) { Py_BEGIN_ALLOW_THREADS sqlite3_finalize(self->st); @@ -114,7 +117,7 @@ stmt_dealloc(pysqlite_Statement *self) } static int -stmt_traverse(pysqlite_Statement *self, visitproc visit, void *arg) +stmt_traverse(PyObject *self, visitproc visit, void *arg) { Py_VISIT(Py_TYPE(self)); return 0; diff --git a/Modules/_sre/clinic/sre.c.h b/Modules/_sre/clinic/sre.c.h index 87e4785a428468..cfc6813f37f012 100644 --- a/Modules/_sre/clinic/sre.c.h +++ 
b/Modules/_sre/clinic/sre.c.h @@ -179,7 +179,7 @@ _sre_SRE_Pattern_match_impl(PatternObject *self, PyTypeObject *cls, Py_ssize_t endpos); static PyObject * -_sre_SRE_Pattern_match(PatternObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_sre_SRE_Pattern_match(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -252,7 +252,7 @@ _sre_SRE_Pattern_match(PatternObject *self, PyTypeObject *cls, PyObject *const * endpos = ival; } skip_optional_pos: - return_value = _sre_SRE_Pattern_match_impl(self, cls, string, pos, endpos); + return_value = _sre_SRE_Pattern_match_impl((PatternObject *)self, cls, string, pos, endpos); exit: return return_value; @@ -273,7 +273,7 @@ _sre_SRE_Pattern_fullmatch_impl(PatternObject *self, PyTypeObject *cls, Py_ssize_t endpos); static PyObject * -_sre_SRE_Pattern_fullmatch(PatternObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_sre_SRE_Pattern_fullmatch(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -346,7 +346,7 @@ _sre_SRE_Pattern_fullmatch(PatternObject *self, PyTypeObject *cls, PyObject *con endpos = ival; } skip_optional_pos: - return_value = _sre_SRE_Pattern_fullmatch_impl(self, cls, string, pos, endpos); + return_value = _sre_SRE_Pattern_fullmatch_impl((PatternObject *)self, cls, string, pos, endpos); exit: return return_value; @@ -369,7 +369,7 @@ _sre_SRE_Pattern_search_impl(PatternObject *self, PyTypeObject *cls, Py_ssize_t endpos); static PyObject * -_sre_SRE_Pattern_search(PatternObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_sre_SRE_Pattern_search(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -442,7 +442,7 @@ _sre_SRE_Pattern_search(PatternObject *self, PyTypeObject *cls, PyObject *const endpos = ival; } skip_optional_pos: - return_value = _sre_SRE_Pattern_search_impl(self, cls, string, pos, endpos); + return_value = _sre_SRE_Pattern_search_impl((PatternObject *)self, cls, string, pos, endpos); exit: return return_value; @@ -462,7 +462,7 @@ _sre_SRE_Pattern_findall_impl(PatternObject *self, PyObject *string, Py_ssize_t pos, Py_ssize_t endpos); static PyObject * -_sre_SRE_Pattern_findall(PatternObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_sre_SRE_Pattern_findall(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -535,7 +535,7 @@ _sre_SRE_Pattern_findall(PatternObject *self, PyObject *const *args, Py_ssize_t endpos = ival; } skip_optional_pos: - return_value = _sre_SRE_Pattern_findall_impl(self, string, pos, endpos); + return_value = _sre_SRE_Pattern_findall_impl((PatternObject *)self, string, pos, endpos); exit: return return_value; @@ -558,7 +558,7 @@ _sre_SRE_Pattern_finditer_impl(PatternObject *self, PyTypeObject *cls, Py_ssize_t endpos); static PyObject * -_sre_SRE_Pattern_finditer(PatternObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_sre_SRE_Pattern_finditer(PyObject *self, PyTypeObject *cls, 
PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -631,7 +631,7 @@ _sre_SRE_Pattern_finditer(PatternObject *self, PyTypeObject *cls, PyObject *cons endpos = ival; } skip_optional_pos: - return_value = _sre_SRE_Pattern_finditer_impl(self, cls, string, pos, endpos); + return_value = _sre_SRE_Pattern_finditer_impl((PatternObject *)self, cls, string, pos, endpos); exit: return return_value; @@ -651,7 +651,7 @@ _sre_SRE_Pattern_scanner_impl(PatternObject *self, PyTypeObject *cls, Py_ssize_t endpos); static PyObject * -_sre_SRE_Pattern_scanner(PatternObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_sre_SRE_Pattern_scanner(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -724,7 +724,7 @@ _sre_SRE_Pattern_scanner(PatternObject *self, PyTypeObject *cls, PyObject *const endpos = ival; } skip_optional_pos: - return_value = _sre_SRE_Pattern_scanner_impl(self, cls, string, pos, endpos); + return_value = _sre_SRE_Pattern_scanner_impl((PatternObject *)self, cls, string, pos, endpos); exit: return return_value; @@ -744,7 +744,7 @@ _sre_SRE_Pattern_split_impl(PatternObject *self, PyObject *string, Py_ssize_t maxsplit); static PyObject * -_sre_SRE_Pattern_split(PatternObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_sre_SRE_Pattern_split(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -799,7 +799,7 @@ _sre_SRE_Pattern_split(PatternObject *self, PyObject *const *args, Py_ssize_t na maxsplit = ival; } skip_optional_pos: - return_value = _sre_SRE_Pattern_split_impl(self, string, maxsplit); + return_value = _sre_SRE_Pattern_split_impl((PatternObject *)self, string, maxsplit); exit: return return_value; @@ -819,7 +819,7 @@ _sre_SRE_Pattern_sub_impl(PatternObject *self, PyTypeObject *cls, PyObject *repl, PyObject *string, Py_ssize_t count); static PyObject * -_sre_SRE_Pattern_sub(PatternObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_sre_SRE_Pattern_sub(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -876,7 +876,7 @@ _sre_SRE_Pattern_sub(PatternObject *self, PyTypeObject *cls, PyObject *const *ar count = ival; } skip_optional_pos: - return_value = _sre_SRE_Pattern_sub_impl(self, cls, repl, string, count); + return_value = _sre_SRE_Pattern_sub_impl((PatternObject *)self, cls, repl, string, count); exit: return return_value; @@ -897,7 +897,7 @@ _sre_SRE_Pattern_subn_impl(PatternObject *self, PyTypeObject *cls, Py_ssize_t count); static PyObject * -_sre_SRE_Pattern_subn(PatternObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_sre_SRE_Pattern_subn(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -954,7 +954,7 @@ _sre_SRE_Pattern_subn(PatternObject *self, PyTypeObject *cls, PyObject *const *a count = ival; } skip_optional_pos: - return_value = _sre_SRE_Pattern_subn_impl(self, cls, repl, string, count); + 
return_value = _sre_SRE_Pattern_subn_impl((PatternObject *)self, cls, repl, string, count); exit: return return_value; @@ -972,9 +972,9 @@ static PyObject * _sre_SRE_Pattern___copy___impl(PatternObject *self); static PyObject * -_sre_SRE_Pattern___copy__(PatternObject *self, PyObject *Py_UNUSED(ignored)) +_sre_SRE_Pattern___copy__(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _sre_SRE_Pattern___copy___impl(self); + return _sre_SRE_Pattern___copy___impl((PatternObject *)self); } PyDoc_STRVAR(_sre_SRE_Pattern___deepcopy____doc__, @@ -1001,7 +1001,7 @@ _sre_SRE_Pattern__fail_after_impl(PatternObject *self, int count, PyObject *exception); static PyObject * -_sre_SRE_Pattern__fail_after(PatternObject *self, PyObject *const *args, Py_ssize_t nargs) +_sre_SRE_Pattern__fail_after(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; int count; @@ -1015,7 +1015,7 @@ _sre_SRE_Pattern__fail_after(PatternObject *self, PyObject *const *args, Py_ssiz goto exit; } exception = args[1]; - return_value = _sre_SRE_Pattern__fail_after_impl(self, count, exception); + return_value = _sre_SRE_Pattern__fail_after_impl((PatternObject *)self, count, exception); exit: return return_value; @@ -1169,7 +1169,7 @@ static PyObject * _sre_SRE_Match_expand_impl(MatchObject *self, PyObject *template); static PyObject * -_sre_SRE_Match_expand(MatchObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_sre_SRE_Match_expand(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -1206,7 +1206,7 @@ _sre_SRE_Match_expand(MatchObject *self, PyObject *const *args, Py_ssize_t nargs goto exit; } template = args[0]; - return_value = _sre_SRE_Match_expand_impl(self, template); + return_value = _sre_SRE_Match_expand_impl((MatchObject *)self, template); exit: return return_value; @@ -1228,7 +1228,7 @@ static PyObject * _sre_SRE_Match_groups_impl(MatchObject *self, PyObject *default_value); static PyObject * -_sre_SRE_Match_groups(MatchObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_sre_SRE_Match_groups(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -1270,7 +1270,7 @@ _sre_SRE_Match_groups(MatchObject *self, PyObject *const *args, Py_ssize_t nargs } default_value = args[0]; skip_optional_pos: - return_value = _sre_SRE_Match_groups_impl(self, default_value); + return_value = _sre_SRE_Match_groups_impl((MatchObject *)self, default_value); exit: return return_value; @@ -1292,7 +1292,7 @@ static PyObject * _sre_SRE_Match_groupdict_impl(MatchObject *self, PyObject *default_value); static PyObject * -_sre_SRE_Match_groupdict(MatchObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_sre_SRE_Match_groupdict(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -1334,7 +1334,7 @@ _sre_SRE_Match_groupdict(MatchObject *self, PyObject *const *args, Py_ssize_t na } default_value = args[0]; skip_optional_pos: - return_value = _sre_SRE_Match_groupdict_impl(self, default_value); + return_value = _sre_SRE_Match_groupdict_impl((MatchObject *)self, default_value); exit: return return_value; @@ -1353,7 +1353,7 @@ static Py_ssize_t _sre_SRE_Match_start_impl(MatchObject *self, PyObject 
*group); static PyObject * -_sre_SRE_Match_start(MatchObject *self, PyObject *const *args, Py_ssize_t nargs) +_sre_SRE_Match_start(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *group = NULL; @@ -1367,7 +1367,7 @@ _sre_SRE_Match_start(MatchObject *self, PyObject *const *args, Py_ssize_t nargs) } group = args[0]; skip_optional: - _return_value = _sre_SRE_Match_start_impl(self, group); + _return_value = _sre_SRE_Match_start_impl((MatchObject *)self, group); if ((_return_value == -1) && PyErr_Occurred()) { goto exit; } @@ -1390,7 +1390,7 @@ static Py_ssize_t _sre_SRE_Match_end_impl(MatchObject *self, PyObject *group); static PyObject * -_sre_SRE_Match_end(MatchObject *self, PyObject *const *args, Py_ssize_t nargs) +_sre_SRE_Match_end(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *group = NULL; @@ -1404,7 +1404,7 @@ _sre_SRE_Match_end(MatchObject *self, PyObject *const *args, Py_ssize_t nargs) } group = args[0]; skip_optional: - _return_value = _sre_SRE_Match_end_impl(self, group); + _return_value = _sre_SRE_Match_end_impl((MatchObject *)self, group); if ((_return_value == -1) && PyErr_Occurred()) { goto exit; } @@ -1427,7 +1427,7 @@ static PyObject * _sre_SRE_Match_span_impl(MatchObject *self, PyObject *group); static PyObject * -_sre_SRE_Match_span(MatchObject *self, PyObject *const *args, Py_ssize_t nargs) +_sre_SRE_Match_span(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *group = NULL; @@ -1440,7 +1440,7 @@ _sre_SRE_Match_span(MatchObject *self, PyObject *const *args, Py_ssize_t nargs) } group = args[0]; skip_optional: - return_value = _sre_SRE_Match_span_impl(self, group); + return_value = _sre_SRE_Match_span_impl((MatchObject *)self, group); exit: return return_value; @@ -1458,9 +1458,9 @@ static PyObject * _sre_SRE_Match___copy___impl(MatchObject *self); static PyObject * -_sre_SRE_Match___copy__(MatchObject *self, PyObject *Py_UNUSED(ignored)) +_sre_SRE_Match___copy__(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _sre_SRE_Match___copy___impl(self); + return _sre_SRE_Match___copy___impl((MatchObject *)self); } PyDoc_STRVAR(_sre_SRE_Match___deepcopy____doc__, @@ -1483,13 +1483,13 @@ static PyObject * _sre_SRE_Scanner_match_impl(ScannerObject *self, PyTypeObject *cls); static PyObject * -_sre_SRE_Scanner_match(ScannerObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_sre_SRE_Scanner_match(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "match() takes no arguments"); return NULL; } - return _sre_SRE_Scanner_match_impl(self, cls); + return _sre_SRE_Scanner_match_impl((ScannerObject *)self, cls); } PyDoc_STRVAR(_sre_SRE_Scanner_search__doc__, @@ -1504,16 +1504,16 @@ static PyObject * _sre_SRE_Scanner_search_impl(ScannerObject *self, PyTypeObject *cls); static PyObject * -_sre_SRE_Scanner_search(ScannerObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_sre_SRE_Scanner_search(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "search() takes no arguments"); return NULL; } - return _sre_SRE_Scanner_search_impl(self, cls); + return _sre_SRE_Scanner_search_impl((ScannerObject 
*)self, cls); } #ifndef _SRE_SRE_PATTERN__FAIL_AFTER_METHODDEF #define _SRE_SRE_PATTERN__FAIL_AFTER_METHODDEF #endif /* !defined(_SRE_SRE_PATTERN__FAIL_AFTER_METHODDEF) */ -/*[clinic end generated code: output=f8cb77f2261f0b2e input=a9049054013a1b77]*/ +/*[clinic end generated code: output=3654103c87eb4830 input=a9049054013a1b77]*/ diff --git a/Modules/_sre/sre.c b/Modules/_sre/sre.c index 36f542ddb4df2b..0d8d4843d33c1b 100644 --- a/Modules/_sre/sre.c +++ b/Modules/_sre/sre.c @@ -395,6 +395,11 @@ static struct PyModuleDef sremodule; static PyObject*pattern_new_match(_sremodulestate *, PatternObject*, SRE_STATE*, Py_ssize_t); static PyObject *pattern_scanner(_sremodulestate *, PatternObject *, PyObject *, Py_ssize_t, Py_ssize_t); +#define _PatternObject_CAST(op) ((PatternObject *)(op)) +#define _MatchObject_CAST(op) ((MatchObject *)(op)) +#define _TemplateObject_CAST(op) ((TemplateObject *)(op)) +#define _ScannerObject_CAST(op) ((ScannerObject *)(op)) + /*[clinic input] module _sre class _sre.SRE_Pattern "PatternObject *" "get_sre_module_state_by_class(tp)->Pattern_Type" @@ -699,8 +704,9 @@ pattern_error(Py_ssize_t status) } static int -pattern_traverse(PatternObject *self, visitproc visit, void *arg) +pattern_traverse(PyObject *op, visitproc visit, void *arg) { + PatternObject *self = _PatternObject_CAST(op); Py_VISIT(Py_TYPE(self)); Py_VISIT(self->groupindex); Py_VISIT(self->indexgroup); @@ -712,8 +718,9 @@ pattern_traverse(PatternObject *self, visitproc visit, void *arg) } static int -pattern_clear(PatternObject *self) +pattern_clear(PyObject *op) { + PatternObject *self = _PatternObject_CAST(op); Py_CLEAR(self->groupindex); Py_CLEAR(self->indexgroup); Py_CLEAR(self->pattern); @@ -724,13 +731,13 @@ pattern_clear(PatternObject *self) } static void -pattern_dealloc(PatternObject* self) +pattern_dealloc(PyObject *self) { PyTypeObject *tp = Py_TYPE(self); - PyObject_GC_UnTrack(self); - if (self->weakreflist != NULL) { - PyObject_ClearWeakRefs((PyObject *) self); + PatternObject *obj = _PatternObject_CAST(self); + if (obj->weakreflist != NULL) { + PyObject_ClearWeakRefs(self); } (void)pattern_clear(self); tp->tp_free(self); @@ -1162,7 +1169,7 @@ compile_template(_sremodulestate *module_state, /* delegate to Python code */ PyObject *func = module_state->compile_template; if (func == NULL) { - func = _PyImport_GetModuleAttrString("re", "_compile_template"); + func = PyImport_ImportModuleAttrString("re", "_compile_template"); if (func == NULL) { return NULL; } @@ -1497,7 +1504,7 @@ _sre_SRE_Pattern__fail_after_impl(PatternObject *self, int count, #endif /* Py_DEBUG */ static PyObject * -pattern_repr(PatternObject *obj) +pattern_repr(PyObject *self) { static const struct { const char *name; @@ -1512,6 +1519,8 @@ pattern_repr(PatternObject *obj) {"re.DEBUG", SRE_FLAG_DEBUG}, {"re.ASCII", SRE_FLAG_ASCII}, }; + + PatternObject *obj = _PatternObject_CAST(self); PyObject *result = NULL; PyObject *flag_items; size_t i; @@ -1579,8 +1588,9 @@ PyDoc_STRVAR(pattern_doc, "Compiled regular expression object."); /* PatternObject's 'groupindex' method. 
*/ static PyObject * -pattern_groupindex(PatternObject *self, void *Py_UNUSED(ignored)) +pattern_groupindex(PyObject *op, void *Py_UNUSED(ignored)) { + PatternObject *self = _PatternObject_CAST(op); if (self->groupindex == NULL) return PyDict_New(); return PyDictProxy_New(self->groupindex); @@ -2245,8 +2255,9 @@ _validate(PatternObject *self) /* match methods */ static int -match_traverse(MatchObject *self, visitproc visit, void *arg) +match_traverse(PyObject *op, visitproc visit, void *arg) { + MatchObject *self = _MatchObject_CAST(op); Py_VISIT(Py_TYPE(self)); Py_VISIT(self->string); Py_VISIT(self->regs); @@ -2255,8 +2266,9 @@ match_traverse(MatchObject *self, visitproc visit, void *arg) } static int -match_clear(MatchObject *self) +match_clear(PyObject *op) { + MatchObject *self = _MatchObject_CAST(op); Py_CLEAR(self->string); Py_CLEAR(self->regs); Py_CLEAR(self->pattern); @@ -2264,10 +2276,9 @@ match_clear(MatchObject *self) } static void -match_dealloc(MatchObject* self) +match_dealloc(PyObject *self) { PyTypeObject *tp = Py_TYPE(self); - PyObject_GC_UnTrack(self); (void)match_clear(self); tp->tp_free(self); @@ -2376,8 +2387,9 @@ _sre_SRE_Match_expand_impl(MatchObject *self, PyObject *template) } static PyObject* -match_group(MatchObject* self, PyObject* args) +match_group(PyObject *op, PyObject* args) { + MatchObject *self = _MatchObject_CAST(op); PyObject* result; Py_ssize_t i, size; @@ -2411,8 +2423,9 @@ match_group(MatchObject* self, PyObject* args) } static PyObject* -match_getitem(MatchObject* self, PyObject* name) +match_getitem(PyObject *op, PyObject* name) { + MatchObject *self = _MatchObject_CAST(op); return match_getslice(self, name, Py_None); } @@ -2654,16 +2667,18 @@ PyDoc_STRVAR(match_group_doc, For 0 returns the entire match."); static PyObject * -match_lastindex_get(MatchObject *self, void *Py_UNUSED(ignored)) +match_lastindex_get(PyObject *op, void *Py_UNUSED(ignored)) { + MatchObject *self = _MatchObject_CAST(op); if (self->lastindex >= 0) return PyLong_FromSsize_t(self->lastindex); Py_RETURN_NONE; } static PyObject * -match_lastgroup_get(MatchObject *self, void *Py_UNUSED(ignored)) +match_lastgroup_get(PyObject *op, void *Py_UNUSED(ignored)) { + MatchObject *self = _MatchObject_CAST(op); if (self->pattern->indexgroup && self->lastindex >= 0 && self->lastindex < PyTuple_GET_SIZE(self->pattern->indexgroup)) @@ -2676,8 +2691,9 @@ match_lastgroup_get(MatchObject *self, void *Py_UNUSED(ignored)) } static PyObject * -match_regs_get(MatchObject *self, void *Py_UNUSED(ignored)) +match_regs_get(PyObject *op, void *Py_UNUSED(ignored)) { + MatchObject *self = _MatchObject_CAST(op); if (self->regs) { return Py_NewRef(self->regs); } else @@ -2780,27 +2796,29 @@ pattern_new_match(_sremodulestate* module_state, /* scanner methods (experimental) */ static int -scanner_traverse(ScannerObject *self, visitproc visit, void *arg) +scanner_traverse(PyObject *op, visitproc visit, void *arg) { + ScannerObject *self = _ScannerObject_CAST(op); Py_VISIT(Py_TYPE(self)); Py_VISIT(self->pattern); return 0; } static int -scanner_clear(ScannerObject *self) +scanner_clear(PyObject *op) { + ScannerObject *self = _ScannerObject_CAST(op); Py_CLEAR(self->pattern); return 0; } static void -scanner_dealloc(ScannerObject* self) +scanner_dealloc(PyObject *self) { PyTypeObject *tp = Py_TYPE(self); - PyObject_GC_UnTrack(self); - state_fini(&self->state); + ScannerObject *scanner = _ScannerObject_CAST(self); + state_fini(&scanner->state); (void)scanner_clear(self); tp->tp_free(self); Py_DECREF(tp); @@ -2957,8 
+2975,9 @@ pattern_scanner(_sremodulestate *module_state, /* template methods */ static int -template_traverse(TemplateObject *self, visitproc visit, void *arg) +template_traverse(PyObject *op, visitproc visit, void *arg) { + TemplateObject *self = _TemplateObject_CAST(op); Py_VISIT(Py_TYPE(self)); Py_VISIT(self->literal); for (Py_ssize_t i = 0, n = Py_SIZE(self); i < n; i++) { @@ -2968,8 +2987,9 @@ template_traverse(TemplateObject *self, visitproc visit, void *arg) } static int -template_clear(TemplateObject *self) +template_clear(PyObject *op) { + TemplateObject *self = _TemplateObject_CAST(op); Py_CLEAR(self->literal); for (Py_ssize_t i = 0, n = Py_SIZE(self); i < n; i++) { Py_CLEAR(self->items[i].literal); @@ -2978,10 +2998,9 @@ template_clear(TemplateObject *self) } static void -template_dealloc(TemplateObject *self) +template_dealloc(PyObject *self) { PyTypeObject *tp = Py_TYPE(self); - PyObject_GC_UnTrack(self); (void)template_clear(self); tp->tp_free(self); @@ -3056,8 +3075,10 @@ expand_template(TemplateObject *self, MatchObject *match) static Py_hash_t -pattern_hash(PatternObject *self) +pattern_hash(PyObject *op) { + PatternObject *self = _PatternObject_CAST(op); + Py_hash_t hash, hash2; hash = PyObject_Hash(self->pattern); @@ -3148,7 +3169,7 @@ static PyMethodDef pattern_methods[] = { }; static PyGetSetDef pattern_getset[] = { - {"groupindex", (getter)pattern_groupindex, (setter)NULL, + {"groupindex", pattern_groupindex, NULL, "A dictionary mapping group names to group numbers."}, {NULL} /* Sentinel */ }; @@ -3166,9 +3187,9 @@ static PyMemberDef pattern_members[] = { }; static PyType_Slot pattern_slots[] = { - {Py_tp_dealloc, (destructor)pattern_dealloc}, - {Py_tp_repr, (reprfunc)pattern_repr}, - {Py_tp_hash, (hashfunc)pattern_hash}, + {Py_tp_dealloc, pattern_dealloc}, + {Py_tp_repr, pattern_repr}, + {Py_tp_hash, pattern_hash}, {Py_tp_doc, (void *)pattern_doc}, {Py_tp_richcompare, pattern_richcompare}, {Py_tp_methods, pattern_methods}, @@ -3189,7 +3210,7 @@ static PyType_Spec pattern_spec = { }; static PyMethodDef match_methods[] = { - {"group", (PyCFunction) match_group, METH_VARARGS, match_group_doc}, + {"group", match_group, METH_VARARGS, match_group_doc}, _SRE_SRE_MATCH_START_METHODDEF _SRE_SRE_MATCH_END_METHODDEF _SRE_SRE_MATCH_SPAN_METHODDEF @@ -3204,11 +3225,11 @@ static PyMethodDef match_methods[] = { }; static PyGetSetDef match_getset[] = { - {"lastindex", (getter)match_lastindex_get, (setter)NULL, + {"lastindex", match_lastindex_get, NULL, "The integer index of the last matched capturing group."}, - {"lastgroup", (getter)match_lastgroup_get, (setter)NULL, + {"lastgroup", match_lastgroup_get, NULL, "The name of the last matched capturing group."}, - {"regs", (getter)match_regs_get, (setter)NULL}, + {"regs", match_regs_get, NULL, NULL}, {NULL} }; diff --git a/Modules/_ssl.c b/Modules/_ssl.c index 74cf99957389e2..85e917fbbb7093 100644 --- a/Modules/_ssl.c +++ b/Modules/_ssl.c @@ -473,6 +473,7 @@ fill_and_set_sslerror(_sslmodulestate *state, PyObject *err_value = NULL, *reason_obj = NULL, *lib_obj = NULL; PyObject *verify_obj = NULL, *verify_code_obj = NULL; PyObject *init_value, *msg, *key; + PyUnicodeWriter *writer = NULL; if (errcode != 0) { int lib, reason; @@ -495,11 +496,10 @@ fill_and_set_sslerror(_sslmodulestate *state, if (lib_obj == NULL && PyErr_Occurred()) { goto fail; } - if (errstr == NULL) + if (errstr == NULL) { errstr = ERR_reason_error_string(errcode); + } } - if (errstr == NULL) - errstr = "unknown error"; /* verify code for cert validation error */ if 
((sslsock != NULL) && (type == state->PySSLCertVerificationErrorObject)) { @@ -539,20 +539,50 @@ fill_and_set_sslerror(_sslmodulestate *state, } } - if (verify_obj && reason_obj && lib_obj) - msg = PyUnicode_FromFormat("[%S: %S] %s: %S (_ssl.c:%d)", - lib_obj, reason_obj, errstr, verify_obj, - lineno); - else if (reason_obj && lib_obj) - msg = PyUnicode_FromFormat("[%S: %S] %s (_ssl.c:%d)", - lib_obj, reason_obj, errstr, lineno); - else if (lib_obj) - msg = PyUnicode_FromFormat("[%S] %s (_ssl.c:%d)", - lib_obj, errstr, lineno); - else - msg = PyUnicode_FromFormat("%s (_ssl.c:%d)", errstr, lineno); - if (msg == NULL) + // Format message roughly as: + // [lib_obj: reason_obj] errstr: verify_obj (_ssl.c:lineno) + // with parts missing/replaced if unavailable + writer = PyUnicodeWriter_Create(64); + if (!writer) { + goto fail; + } + if (lib_obj) { + if (PyUnicodeWriter_Format(writer, "[%S", lib_obj) < 0) { + goto fail; + } + if (reason_obj) { + if (PyUnicodeWriter_Format(writer, ": %S", reason_obj) < 0) { + goto fail; + } + } + if (PyUnicodeWriter_WriteUTF8(writer, "] ", 2) < 0) { + goto fail; + } + } + if (errstr) { + if (PyUnicodeWriter_Format(writer, "%s", errstr) < 0) { + goto fail; + } + } + else { + if (PyUnicodeWriter_Format( + writer, "unknown error (0x%x)", errcode) < 0) { + goto fail; + } + } + if (verify_obj) { + if (PyUnicodeWriter_Format(writer, ": %S", verify_obj) < 0) { + goto fail; + } + } + if (PyUnicodeWriter_Format(writer, " (_ssl.c:%d)", lineno) < 0) { goto fail; + } + msg = PyUnicodeWriter_Finish(writer); + writer = NULL; + if (!msg) { + goto fail; + } init_value = Py_BuildValue("iN", ERR_GET_REASON(ssl_errno), msg); if (init_value == NULL) @@ -587,6 +617,7 @@ fill_and_set_sslerror(_sslmodulestate *state, Py_XDECREF(err_value); Py_XDECREF(verify_code_obj); Py_XDECREF(verify_obj); + PyUnicodeWriter_Discard(writer); } static int @@ -934,13 +965,13 @@ newPySSLSocket(PySSLContext *sslctx, PySocketSockObject *sock, } } if (owner && owner != Py_None) { - if (_ssl__SSLSocket_owner_set(self, owner, NULL) == -1) { + if (_ssl__SSLSocket_owner_set((PyObject *)self, owner, NULL) < 0) { Py_DECREF(self); return NULL; } } if (session && session != Py_None) { - if (_ssl__SSLSocket_session_set(self, session, NULL) == -1) { + if (_ssl__SSLSocket_session_set((PyObject *)self, session, NULL) < 0) { Py_DECREF(self); return NULL; } @@ -4377,7 +4408,7 @@ _ssl__SSLContext_load_dh_params_impl(PySSLContext *self, PyObject *filepath) FILE *f; DH *dh; - f = _Py_fopen_obj(filepath, "rb"); + f = Py_fopen(filepath, "rb"); if (f == NULL) return NULL; @@ -4635,7 +4666,8 @@ _servername_callback(SSL *s, int *al, void *args) servername_bytes = PyBytes_FromString(servername); if (servername_bytes == NULL) { - PyErr_WriteUnraisable((PyObject *) sslctx); + PyErr_FormatUnraisable("Exception ignored " + "in ssl servername callback"); goto error; } /* server_hostname was encoded to an A-label by our caller; put it @@ -4643,7 +4675,10 @@ _servername_callback(SSL *s, int *al, void *args) */ servername_str = PyUnicode_FromEncodedObject(servername_bytes, "ascii", NULL); if (servername_str == NULL) { - PyErr_WriteUnraisable(servername_bytes); + PyErr_FormatUnraisable("Exception ignored " + "in ssl servername callback " + "while decoding name %R", + servername_bytes); Py_DECREF(servername_bytes); goto error; } @@ -4656,7 +4691,10 @@ _servername_callback(SSL *s, int *al, void *args) Py_DECREF(ssl_socket); if (result == NULL) { - PyErr_WriteUnraisable(sslctx->set_sni_cb); + PyErr_FormatUnraisable("Exception ignored " + 
"in ssl servername callback " + "while calling set SNI callback %R", + sslctx->set_sni_cb); *al = SSL_AD_HANDSHAKE_FAILURE; ret = SSL_TLSEXT_ERR_ALERT_FATAL; } @@ -4669,7 +4707,11 @@ _servername_callback(SSL *s, int *al, void *args) } else { *al = (int) PyLong_AsLong(result); if (PyErr_Occurred()) { - PyErr_WriteUnraisable(result); + PyErr_FormatUnraisable("Exception ignored " + "in ssl servername callback " + "while calling set SNI callback " + "(result=%R)", + result); *al = SSL_AD_INTERNAL_ERROR; } ret = SSL_TLSEXT_ERR_ALERT_FATAL; @@ -4976,7 +5018,8 @@ static unsigned int psk_client_callback(SSL *s, error: if (PyErr_Occurred()) { - PyErr_WriteUnraisable(callback); + PyErr_FormatUnraisable("Exception ignored in ssl PSK client callback " + "while calling callback %R", callback); } PyGILState_Release(gstate); return 0; @@ -5085,7 +5128,8 @@ static unsigned int psk_server_callback(SSL *s, error: if (PyErr_Occurred()) { - PyErr_WriteUnraisable(callback); + PyErr_FormatUnraisable("Exception ignored in ssl PSK server callback " + "while calling callback %R", callback); } PyGILState_Release(gstate); return 0; diff --git a/Modules/_ssl/cert.c b/Modules/_ssl/cert.c index bda66dc4d94ae6..c11ed8e3a282e6 100644 --- a/Modules/_ssl/cert.c +++ b/Modules/_ssl/cert.c @@ -153,10 +153,13 @@ _x509name_print(_sslmodulestate *state, X509_NAME *name, int indent, unsigned lo * PySSLCertificate_Type */ +#define _PySSLCertificate_CAST(op) ((PySSLCertificate *)(op)) + static PyObject * -certificate_repr(PySSLCertificate *self) +certificate_repr(PyObject *op) { PyObject *osubject, *result; + PySSLCertificate *self = _PySSLCertificate_CAST(op); /* subject string is ASCII encoded, UTF-8 chars are quoted */ osubject = _x509name_print( @@ -176,8 +179,9 @@ certificate_repr(PySSLCertificate *self) } static Py_hash_t -certificate_hash(PySSLCertificate *self) +certificate_hash(PyObject *op) { + PySSLCertificate *self = _PySSLCertificate_CAST(op); if (self->hash == (Py_hash_t)-1) { unsigned long hash; hash = X509_subject_name_hash(self->cert); @@ -191,19 +195,20 @@ certificate_hash(PySSLCertificate *self) } static PyObject * -certificate_richcompare(PySSLCertificate *self, PyObject *other, int op) +certificate_richcompare(PyObject *lhs, PyObject *rhs, int op) { int cmp; + PySSLCertificate *self = _PySSLCertificate_CAST(lhs); _sslmodulestate *state = get_state_cert(self); - if (Py_TYPE(other) != state->PySSLCertificate_Type) { + if (Py_TYPE(rhs) != state->PySSLCertificate_Type) { Py_RETURN_NOTIMPLEMENTED; } /* only support == and != */ if ((op != Py_EQ) && (op != Py_NE)) { Py_RETURN_NOTIMPLEMENTED; } - cmp = X509_cmp(self->cert, ((PySSLCertificate*)other)->cert); + cmp = X509_cmp(self->cert, ((PySSLCertificate*)rhs)->cert); if (((op == Py_EQ) && (cmp == 0)) || ((op == Py_NE) && (cmp != 0))) { Py_RETURN_TRUE; } else { @@ -212,11 +217,12 @@ certificate_richcompare(PySSLCertificate *self, PyObject *other, int op) } static void -certificate_dealloc(PySSLCertificate *self) +certificate_dealloc(PyObject *op) { + PySSLCertificate *self = _PySSLCertificate_CAST(op); PyTypeObject *tp = Py_TYPE(self); X509_free(self->cert); - Py_TYPE(self)->tp_free(self); + (void)Py_TYPE(self)->tp_free(self); Py_DECREF(tp); } diff --git a/Modules/_ssl/clinic/cert.c.h b/Modules/_ssl/clinic/cert.c.h index 19559442cd9b88..3e0c5b405092db 100644 --- a/Modules/_ssl/clinic/cert.c.h +++ b/Modules/_ssl/clinic/cert.c.h @@ -20,7 +20,7 @@ static PyObject * _ssl_Certificate_public_bytes_impl(PySSLCertificate *self, int format); static PyObject * 
-_ssl_Certificate_public_bytes(PySSLCertificate *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_ssl_Certificate_public_bytes(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -65,7 +65,7 @@ _ssl_Certificate_public_bytes(PySSLCertificate *self, PyObject *const *args, Py_ goto exit; } skip_optional_pos: - return_value = _ssl_Certificate_public_bytes_impl(self, format); + return_value = _ssl_Certificate_public_bytes_impl((PySSLCertificate *)self, format); exit: return return_value; @@ -83,8 +83,8 @@ static PyObject * _ssl_Certificate_get_info_impl(PySSLCertificate *self); static PyObject * -_ssl_Certificate_get_info(PySSLCertificate *self, PyObject *Py_UNUSED(ignored)) +_ssl_Certificate_get_info(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _ssl_Certificate_get_info_impl(self); + return _ssl_Certificate_get_info_impl((PySSLCertificate *)self); } -/*[clinic end generated code: output=e5fa354db5fc56b4 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=51365b498b975ee0 input=a9049054013a1b77]*/ diff --git a/Modules/_ssl/debughelpers.c b/Modules/_ssl/debughelpers.c index 9c87f8b4d21e68..318c045a0eec3c 100644 --- a/Modules/_ssl/debughelpers.c +++ b/Modules/_ssl/debughelpers.c @@ -180,8 +180,8 @@ _PySSLContext_set_keylog_filename(PySSLContext *self, PyObject *arg, void *c) { return 0; } - /* _Py_fopen_obj() also checks that arg is of proper type. */ - fp = _Py_fopen_obj(arg, "a" PY_STDIOTEXTMODE); + /* Py_fopen() also checks that arg is of proper type. */ + fp = Py_fopen(arg, "a" PY_STDIOTEXTMODE); if (fp == NULL) return -1; diff --git a/Modules/_testcapi/clinic/file.c.h b/Modules/_testcapi/clinic/file.c.h new file mode 100644 index 00000000000000..6efb6b47353443 --- /dev/null +++ b/Modules/_testcapi/clinic/file.c.h @@ -0,0 +1,64 @@ +/*[clinic input] +preserve +[clinic start generated code]*/ + +#include "pycore_modsupport.h" // _PyArg_CheckPositional() + +PyDoc_STRVAR(_testcapi_pyfile_newstdprinter__doc__, +"pyfile_newstdprinter($module, fd, /)\n" +"--\n" +"\n"); + +#define _TESTCAPI_PYFILE_NEWSTDPRINTER_METHODDEF \ + {"pyfile_newstdprinter", (PyCFunction)_testcapi_pyfile_newstdprinter, METH_O, _testcapi_pyfile_newstdprinter__doc__}, + +static PyObject * +_testcapi_pyfile_newstdprinter_impl(PyObject *module, int fd); + +static PyObject * +_testcapi_pyfile_newstdprinter(PyObject *module, PyObject *arg) +{ + PyObject *return_value = NULL; + int fd; + + fd = PyLong_AsInt(arg); + if (fd == -1 && PyErr_Occurred()) { + goto exit; + } + return_value = _testcapi_pyfile_newstdprinter_impl(module, fd); + +exit: + return return_value; +} + +PyDoc_STRVAR(_testcapi_py_fopen__doc__, +"py_fopen($module, path, mode, /)\n" +"--\n" +"\n" +"Call Py_fopen(), fread(256) and Py_fclose(). 
Return read bytes."); + +#define _TESTCAPI_PY_FOPEN_METHODDEF \ + {"py_fopen", _PyCFunction_CAST(_testcapi_py_fopen), METH_FASTCALL, _testcapi_py_fopen__doc__}, + +static PyObject * +_testcapi_py_fopen_impl(PyObject *module, PyObject *path, const char *mode, + Py_ssize_t mode_length); + +static PyObject * +_testcapi_py_fopen(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + PyObject *path; + const char *mode; + Py_ssize_t mode_length; + + if (!_PyArg_ParseStack(args, nargs, "Oz#:py_fopen", + &path, &mode, &mode_length)) { + goto exit; + } + return_value = _testcapi_py_fopen_impl(module, path, mode, mode_length); + +exit: + return return_value; +} +/*[clinic end generated code: output=e943bbd7f181d079 input=a9049054013a1b77]*/ diff --git a/Modules/_testcapi/dict.c b/Modules/_testcapi/dict.c index 307797f98f12ae..b7c73d7332bd4e 100644 --- a/Modules/_testcapi/dict.c +++ b/Modules/_testcapi/dict.c @@ -181,6 +181,83 @@ dict_popstring_null(PyObject *self, PyObject *args) RETURN_INT(PyDict_PopString(dict, key, NULL)); } + +static int +test_dict_inner(PyObject *self, int count) +{ + Py_ssize_t pos = 0, iterations = 0; + int i; + PyObject *dict = PyDict_New(); + PyObject *v, *k; + + if (dict == NULL) + return -1; + + for (i = 0; i < count; i++) { + v = PyLong_FromLong(i); + if (v == NULL) { + goto error; + } + if (PyDict_SetItem(dict, v, v) < 0) { + Py_DECREF(v); + goto error; + } + Py_DECREF(v); + } + + k = v = UNINITIALIZED_PTR; + while (PyDict_Next(dict, &pos, &k, &v)) { + PyObject *o; + iterations++; + + assert(k != UNINITIALIZED_PTR); + assert(v != UNINITIALIZED_PTR); + i = PyLong_AS_LONG(v) + 1; + o = PyLong_FromLong(i); + if (o == NULL) { + goto error; + } + if (PyDict_SetItem(dict, k, o) < 0) { + Py_DECREF(o); + goto error; + } + Py_DECREF(o); + k = v = UNINITIALIZED_PTR; + } + assert(k == UNINITIALIZED_PTR); + assert(v == UNINITIALIZED_PTR); + + Py_DECREF(dict); + + if (iterations != count) { + PyErr_SetString( + PyExc_AssertionError, + "test_dict_iteration: dict iteration went wrong "); + return -1; + } else { + return 0; + } +error: + Py_DECREF(dict); + return -1; +} + + +static PyObject* +test_dict_iteration(PyObject* self, PyObject *Py_UNUSED(ignored)) +{ + int i; + + for (i = 0; i < 200; i++) { + if (test_dict_inner(self, i) < 0) { + return NULL; + } + } + + Py_RETURN_NONE; +} + + static PyMethodDef test_methods[] = { {"dict_containsstring", dict_containsstring, METH_VARARGS}, {"dict_getitemref", dict_getitemref, METH_VARARGS}, @@ -191,6 +268,7 @@ static PyMethodDef test_methods[] = { {"dict_pop_null", dict_pop_null, METH_VARARGS}, {"dict_popstring", dict_popstring, METH_VARARGS}, {"dict_popstring_null", dict_popstring_null, METH_VARARGS}, + {"test_dict_iteration", test_dict_iteration, METH_NOARGS}, {NULL}, }; diff --git a/Modules/_testcapi/file.c b/Modules/_testcapi/file.c index 634563f6ea12cb..060e0f50598d7e 100644 --- a/Modules/_testcapi/file.c +++ b/Modules/_testcapi/file.c @@ -1,17 +1,70 @@ +// clinic/file.c.h uses internal pycore_modsupport.h API +#define PYTESTCAPI_NEED_INTERNAL_API + #include "parts.h" #include "util.h" +#include "clinic/file.c.h" + + +/*[clinic input] +module _testcapi +[clinic start generated code]*/ +/*[clinic end generated code: output=da39a3ee5e6b4b0d input=6361033e795369fc]*/ + + +/*[clinic input] +_testcapi.pyfile_newstdprinter + + fd: int + / + +[clinic start generated code]*/ + +static PyObject * +_testcapi_pyfile_newstdprinter_impl(PyObject *module, int fd) +/*[clinic end generated code: 
output=8a2d1c57b6892db3 input=442f1824142262ea]*/ +{ + return PyFile_NewStdPrinter(fd); +} + + +/*[clinic input] +_testcapi.py_fopen + + path: object + mode: str(zeroes=True, accept={robuffer, str, NoneType}) + / + +Call Py_fopen(), fread(256) and Py_fclose(). Return read bytes. +[clinic start generated code]*/ + +static PyObject * +_testcapi_py_fopen_impl(PyObject *module, PyObject *path, const char *mode, + Py_ssize_t mode_length) +/*[clinic end generated code: output=69840d0cfd8b7fbb input=f3a579dd7eb60926]*/ +{ + NULLABLE(path); + FILE *fp = Py_fopen(path, mode); + if (fp == NULL) { + return NULL; + } + + char buffer[256]; + size_t size = fread(buffer, 1, Py_ARRAY_LENGTH(buffer), fp); + Py_fclose(fp); + + return PyBytes_FromStringAndSize(buffer, size); +} static PyMethodDef test_methods[] = { + _TESTCAPI_PYFILE_NEWSTDPRINTER_METHODDEF + _TESTCAPI_PY_FOPEN_METHODDEF {NULL}, }; int _PyTestCapi_Init_File(PyObject *m) { - if (PyModule_AddFunctions(m, test_methods) < 0){ - return -1; - } - - return 0; + return PyModule_AddFunctions(m, test_methods); } diff --git a/Modules/_testcapi/float.c b/Modules/_testcapi/float.c index 15ea97ec4520b7..e3869134c84d43 100644 --- a/Modules/_testcapi/float.c +++ b/Modules/_testcapi/float.c @@ -99,9 +99,68 @@ _testcapi_float_unpack_impl(PyObject *module, const char *data, return PyFloat_FromDouble(d); } + +/* Test PyOS_string_to_double. */ +static PyObject * +test_string_to_double(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + double result; + const char *msg; + +#define CHECK_STRING(STR, expected) \ + do { \ + result = PyOS_string_to_double(STR, NULL, NULL); \ + if (result == -1.0 && PyErr_Occurred()) { \ + return NULL; \ + } \ + if (result != (double)expected) { \ + msg = "conversion of " STR " to float failed"; \ + goto fail; \ + } \ + } while (0) + +#define CHECK_INVALID(STR) \ + do { \ + result = PyOS_string_to_double(STR, NULL, NULL); \ + if (result == -1.0 && PyErr_Occurred()) { \ + if (PyErr_ExceptionMatches(PyExc_ValueError)) { \ + PyErr_Clear(); \ + } \ + else { \ + return NULL; \ + } \ + } \ + else { \ + msg = "conversion of " STR " didn't raise ValueError"; \ + goto fail; \ + } \ + } while (0) + + CHECK_STRING("0.1", 0.1); + CHECK_STRING("1.234", 1.234); + CHECK_STRING("-1.35", -1.35); + CHECK_STRING(".1e01", 1.0); + CHECK_STRING("2.e-2", 0.02); + + CHECK_INVALID(" 0.1"); + CHECK_INVALID("\t\n-3"); + CHECK_INVALID(".123 "); + CHECK_INVALID("3\n"); + CHECK_INVALID("123abc"); + + Py_RETURN_NONE; + fail: + PyErr_Format(PyExc_AssertionError, "test_string_to_double: %s", msg); + return NULL; +#undef CHECK_STRING +#undef CHECK_INVALID +} + + static PyMethodDef test_methods[] = { _TESTCAPI_FLOAT_PACK_METHODDEF _TESTCAPI_FLOAT_UNPACK_METHODDEF + {"test_string_to_double", test_string_to_double, METH_NOARGS}, {NULL}, }; diff --git a/Modules/_testcapi/frame.c b/Modules/_testcapi/frame.c new file mode 100644 index 00000000000000..5748dca948ea94 --- /dev/null +++ b/Modules/_testcapi/frame.c @@ -0,0 +1,134 @@ +#include "parts.h" +#include "util.h" + +#include "frameobject.h" // PyFrame_New() + + +static PyObject * +frame_getlocals(PyObject *self, PyObject *frame) +{ + if (!PyFrame_Check(frame)) { + PyErr_SetString(PyExc_TypeError, "argument must be a frame"); + return NULL; + } + return PyFrame_GetLocals((PyFrameObject *)frame); +} + + +static PyObject * +frame_getglobals(PyObject *self, PyObject *frame) +{ + if (!PyFrame_Check(frame)) { + PyErr_SetString(PyExc_TypeError, "argument must be a frame"); + return NULL; + } + return 
PyFrame_GetGlobals((PyFrameObject *)frame); +} + + +static PyObject * +frame_getgenerator(PyObject *self, PyObject *frame) +{ + if (!PyFrame_Check(frame)) { + PyErr_SetString(PyExc_TypeError, "argument must be a frame"); + return NULL; + } + return PyFrame_GetGenerator((PyFrameObject *)frame); +} + + +static PyObject * +frame_getbuiltins(PyObject *self, PyObject *frame) +{ + if (!PyFrame_Check(frame)) { + PyErr_SetString(PyExc_TypeError, "argument must be a frame"); + return NULL; + } + return PyFrame_GetBuiltins((PyFrameObject *)frame); +} + + +static PyObject * +frame_getlasti(PyObject *self, PyObject *frame) +{ + if (!PyFrame_Check(frame)) { + PyErr_SetString(PyExc_TypeError, "argument must be a frame"); + return NULL; + } + int lasti = PyFrame_GetLasti((PyFrameObject *)frame); + if (lasti < 0) { + assert(lasti == -1); + Py_RETURN_NONE; + } + return PyLong_FromLong(lasti); +} + + +static PyObject * +frame_new(PyObject *self, PyObject *args) +{ + PyObject *code, *globals, *locals; + if (!PyArg_ParseTuple(args, "OOO", &code, &globals, &locals)) { + return NULL; + } + if (!PyCode_Check(code)) { + PyErr_SetString(PyExc_TypeError, "argument must be a code object"); + return NULL; + } + PyThreadState *tstate = PyThreadState_Get(); + + return (PyObject *)PyFrame_New(tstate, (PyCodeObject *)code, globals, locals); +} + + +static PyObject * +frame_getvar(PyObject *self, PyObject *args) +{ + PyObject *frame, *name; + if (!PyArg_ParseTuple(args, "OO", &frame, &name)) { + return NULL; + } + if (!PyFrame_Check(frame)) { + PyErr_SetString(PyExc_TypeError, "argument must be a frame"); + return NULL; + } + + return PyFrame_GetVar((PyFrameObject *)frame, name); +} + + +static PyObject * +frame_getvarstring(PyObject *self, PyObject *args) +{ + PyObject *frame; + const char *name; + if (!PyArg_ParseTuple(args, "Oy", &frame, &name)) { + return NULL; + } + if (!PyFrame_Check(frame)) { + PyErr_SetString(PyExc_TypeError, "argument must be a frame"); + return NULL; + } + + return PyFrame_GetVarString((PyFrameObject *)frame, name); +} + + +static PyMethodDef test_methods[] = { + {"frame_getlocals", frame_getlocals, METH_O, NULL}, + {"frame_getglobals", frame_getglobals, METH_O, NULL}, + {"frame_getgenerator", frame_getgenerator, METH_O, NULL}, + {"frame_getbuiltins", frame_getbuiltins, METH_O, NULL}, + {"frame_getlasti", frame_getlasti, METH_O, NULL}, + {"frame_new", frame_new, METH_VARARGS, NULL}, + {"frame_getvar", frame_getvar, METH_VARARGS, NULL}, + {"frame_getvarstring", frame_getvarstring, METH_VARARGS, NULL}, + {NULL}, +}; + +int +_PyTestCapi_Init_Frame(PyObject *m) +{ + return PyModule_AddFunctions(m, test_methods); +} + diff --git a/Modules/_testcapi/function.c b/Modules/_testcapi/function.c new file mode 100644 index 00000000000000..ec1ba508df2ce9 --- /dev/null +++ b/Modules/_testcapi/function.c @@ -0,0 +1,143 @@ +#include "parts.h" +#include "util.h" + + +static PyObject * +function_get_code(PyObject *self, PyObject *func) +{ + PyObject *code = PyFunction_GetCode(func); + if (code != NULL) { + return Py_NewRef(code); + } else { + return NULL; + } +} + + +static PyObject * +function_get_globals(PyObject *self, PyObject *func) +{ + PyObject *globals = PyFunction_GetGlobals(func); + if (globals != NULL) { + return Py_NewRef(globals); + } else { + return NULL; + } +} + + +static PyObject * +function_get_module(PyObject *self, PyObject *func) +{ + PyObject *module = PyFunction_GetModule(func); + if (module != NULL) { + return Py_NewRef(module); + } else { + return NULL; + } +} + + +static PyObject * 
+function_get_defaults(PyObject *self, PyObject *func) +{ + PyObject *defaults = PyFunction_GetDefaults(func); + if (defaults != NULL) { + return Py_NewRef(defaults); + } else if (PyErr_Occurred()) { + return NULL; + } else { + Py_RETURN_NONE; // This can happen when `defaults` are set to `None` + } +} + + +static PyObject * +function_set_defaults(PyObject *self, PyObject *args) +{ + PyObject *func = NULL, *defaults = NULL; + if (!PyArg_ParseTuple(args, "OO", &func, &defaults)) { + return NULL; + } + int result = PyFunction_SetDefaults(func, defaults); + if (result == -1) + return NULL; + Py_RETURN_NONE; +} + + +static PyObject * +function_get_kw_defaults(PyObject *self, PyObject *func) +{ + PyObject *defaults = PyFunction_GetKwDefaults(func); + if (defaults != NULL) { + return Py_NewRef(defaults); + } else if (PyErr_Occurred()) { + return NULL; + } else { + Py_RETURN_NONE; // This can happen when `kwdefaults` are set to `None` + } +} + + +static PyObject * +function_set_kw_defaults(PyObject *self, PyObject *args) +{ + PyObject *func = NULL, *defaults = NULL; + if (!PyArg_ParseTuple(args, "OO", &func, &defaults)) { + return NULL; + } + int result = PyFunction_SetKwDefaults(func, defaults); + if (result == -1) + return NULL; + Py_RETURN_NONE; +} + + +static PyObject * +function_get_closure(PyObject *self, PyObject *func) +{ + PyObject *closure = PyFunction_GetClosure(func); + if (closure != NULL) { + return Py_NewRef(closure); + } else if (PyErr_Occurred()) { + return NULL; + } else { + Py_RETURN_NONE; // This can happen when `closure` is set to `None` + } +} + + +static PyObject * +function_set_closure(PyObject *self, PyObject *args) +{ + PyObject *func = NULL, *closure = NULL; + if (!PyArg_ParseTuple(args, "OO", &func, &closure)) { + return NULL; + } + int result = PyFunction_SetClosure(func, closure); + if (result == -1) { + return NULL; + } + Py_RETURN_NONE; +} + + +static PyMethodDef test_methods[] = { + {"function_get_code", function_get_code, METH_O, NULL}, + {"function_get_globals", function_get_globals, METH_O, NULL}, + {"function_get_module", function_get_module, METH_O, NULL}, + {"function_get_defaults", function_get_defaults, METH_O, NULL}, + {"function_set_defaults", function_set_defaults, METH_VARARGS, NULL}, + {"function_get_kw_defaults", function_get_kw_defaults, METH_O, NULL}, + {"function_set_kw_defaults", function_set_kw_defaults, METH_VARARGS, NULL}, + {"function_get_closure", function_get_closure, METH_O, NULL}, + {"function_set_closure", function_set_closure, METH_VARARGS, NULL}, + {NULL}, +}; + +int +_PyTestCapi_Init_Function(PyObject *m) +{ + return PyModule_AddFunctions(m, test_methods); +} diff --git a/Modules/_testcapi/gc.c b/Modules/_testcapi/gc.c index 7e33e0d4861e84..3691796302e500 100644 --- a/Modules/_testcapi/gc.c +++ b/Modules/_testcapi/gc.c @@ -94,7 +94,7 @@ slot_tp_del(PyObject *self) PyObject *tp_del = PyUnicode_InternFromString("__tp_del__"); if (tp_del == NULL) { - PyErr_WriteUnraisable(NULL); + PyErr_FormatUnraisable("Exception ignored while deallocating"); PyErr_SetRaisedException(exc); return; } @@ -104,10 +104,13 @@ slot_tp_del(PyObject *self) if (del != NULL) { res = PyObject_CallOneArg(del, self); Py_DECREF(del); - if (res == NULL) - PyErr_WriteUnraisable(del); - else + if (res == NULL) { + PyErr_FormatUnraisable("Exception ignored while calling " + "deallocator %R", del); + } + else { Py_DECREF(res); + } } /* Restore the saved exception. 
*/ diff --git a/Modules/_testcapi/immortal.c b/Modules/_testcapi/immortal.c index 9f81389811c645..0663c3781d426a 100644 --- a/Modules/_testcapi/immortal.c +++ b/Modules/_testcapi/immortal.c @@ -1,5 +1,8 @@ #include "parts.h" +#define Py_BUILD_CORE +#include "internal/pycore_long.h" // IMMORTALITY_BIT_MASK + int verify_immortality(PyObject *object) { assert(_Py_IsImmortal(object)); @@ -26,14 +29,31 @@ static PyObject * test_immortal_small_ints(PyObject *self, PyObject *Py_UNUSED(ignored)) { for (int i = -5; i <= 256; i++) { - assert(verify_immortality(PyLong_FromLong(i))); + PyObject *obj = PyLong_FromLong(i); + assert(verify_immortality(obj)); + int has_int_immortal_bit = ((PyLongObject *)obj)->long_value.lv_tag & IMMORTALITY_BIT_MASK; + assert(has_int_immortal_bit); + } + for (int i = 257; i <= 260; i++) { + PyObject *obj = PyLong_FromLong(i); + assert(obj); + int has_int_immortal_bit = ((PyLongObject *)obj)->long_value.lv_tag & IMMORTALITY_BIT_MASK; + assert(!has_int_immortal_bit); + Py_DECREF(obj); } Py_RETURN_NONE; } +static PyObject * +is_immortal(PyObject *self, PyObject *op) +{ + return PyBool_FromLong(PyUnstable_IsImmortal(op)); +} + static PyMethodDef test_methods[] = { {"test_immortal_builtins", test_immortal_builtins, METH_NOARGS}, {"test_immortal_small_ints", test_immortal_small_ints, METH_NOARGS}, + {"is_immortal", is_immortal, METH_O}, {NULL}, }; diff --git a/Modules/_testcapi/import.c b/Modules/_testcapi/import.c new file mode 100644 index 00000000000000..27d37498f3cd83 --- /dev/null +++ b/Modules/_testcapi/import.c @@ -0,0 +1,44 @@ +#include "parts.h" +#include "util.h" + +// Test PyImport_ImportModuleAttr() +static PyObject * +pyimport_importmoduleattr(PyObject *self, PyObject *args) +{ + PyObject *mod_name, *attr_name; + if (!PyArg_ParseTuple(args, "OO", &mod_name, &attr_name)) { + return NULL; + } + NULLABLE(mod_name); + NULLABLE(attr_name); + + return PyImport_ImportModuleAttr(mod_name, attr_name); +} + + +// Test PyImport_ImportModuleAttrString() +static PyObject * +pyimport_importmoduleattrstring(PyObject *self, PyObject *args) +{ + const char *mod_name, *attr_name; + Py_ssize_t len; + if (!PyArg_ParseTuple(args, "z#z#", &mod_name, &len, &attr_name, &len)) { + return NULL; + } + + return PyImport_ImportModuleAttrString(mod_name, attr_name); +} + + +static PyMethodDef test_methods[] = { + {"PyImport_ImportModuleAttr", pyimport_importmoduleattr, METH_VARARGS}, + {"PyImport_ImportModuleAttrString", pyimport_importmoduleattrstring, METH_VARARGS}, + {NULL}, +}; + +int +_PyTestCapi_Init_Import(PyObject *m) +{ + return PyModule_AddFunctions(m, test_methods); +} + diff --git a/Modules/_testcapi/list.c b/Modules/_testcapi/list.c index 09cec4c30c8c36..530b47780ac94e 100644 --- a/Modules/_testcapi/list.c +++ b/Modules/_testcapi/list.c @@ -60,22 +60,61 @@ list_extend(PyObject* Py_UNUSED(module), PyObject *args) } +static PyObject* +test_list_api(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + PyObject* list; + int i; + + /* SF bug 132008: PyList_Reverse segfaults */ +#define NLIST 30 + list = PyList_New(NLIST); + if (list == (PyObject*)NULL) + return (PyObject*)NULL; + /* list = range(NLIST) */ + for (i = 0; i < NLIST; ++i) { + PyObject* anint = PyLong_FromLong(i); + if (anint == (PyObject*)NULL) { + Py_DECREF(list); + return (PyObject*)NULL; + } + PyList_SET_ITEM(list, i, anint); + } + /* list.reverse(), via PyList_Reverse() */ + i = PyList_Reverse(list); /* should not blow up! 
*/ + if (i != 0) { + Py_DECREF(list); + return (PyObject*)NULL; + } + /* Check that list == range(29, -1, -1) now */ + for (i = 0; i < NLIST; ++i) { + PyObject* anint = PyList_GET_ITEM(list, i); + if (PyLong_AS_LONG(anint) != NLIST-1-i) { + PyErr_SetString(PyExc_AssertionError, + "test_list_api: reverse screwed up"); + Py_DECREF(list); + return (PyObject*)NULL; + } + } + Py_DECREF(list); +#undef NLIST + + Py_RETURN_NONE; +} + + static PyMethodDef test_methods[] = { {"list_get_size", list_get_size, METH_O}, {"list_get_item", list_get_item, METH_VARARGS}, {"list_set_item", list_set_item, METH_VARARGS}, {"list_clear", list_clear, METH_O}, {"list_extend", list_extend, METH_VARARGS}, - + {"test_list_api", test_list_api, METH_NOARGS}, {NULL}, }; int _PyTestCapi_Init_List(PyObject *m) { - if (PyModule_AddFunctions(m, test_methods) < 0) { - return -1; - } - - return 0; + return PyModule_AddFunctions(m, test_methods); } diff --git a/Modules/_testcapi/mem.c b/Modules/_testcapi/mem.c index ab4ad934644c38..7237fb94c3f51f 100644 --- a/Modules/_testcapi/mem.c +++ b/Modules/_testcapi/mem.c @@ -557,8 +557,9 @@ tracemalloc_untrack(PyObject *self, PyObject *args) { unsigned int domain; PyObject *ptr_obj; + int release_gil = 0; - if (!PyArg_ParseTuple(args, "IO", &domain, &ptr_obj)) { + if (!PyArg_ParseTuple(args, "IO|i", &domain, &ptr_obj, &release_gil)) { return NULL; } void *ptr = PyLong_AsVoidPtr(ptr_obj); @@ -566,7 +567,15 @@ tracemalloc_untrack(PyObject *self, PyObject *args) return NULL; } - int res = PyTraceMalloc_Untrack(domain, (uintptr_t)ptr); + int res; + if (release_gil) { + Py_BEGIN_ALLOW_THREADS + res = PyTraceMalloc_Untrack(domain, (uintptr_t)ptr); + Py_END_ALLOW_THREADS + } + else { + res = PyTraceMalloc_Untrack(domain, (uintptr_t)ptr); + } if (res < 0) { PyErr_SetString(PyExc_RuntimeError, "PyTraceMalloc_Untrack error"); return NULL; @@ -575,6 +584,106 @@ tracemalloc_untrack(PyObject *self, PyObject *args) Py_RETURN_NONE; } + +static void +tracemalloc_track_race_thread(void *data) +{ + PyTraceMalloc_Track(123, 10, 1); + PyTraceMalloc_Untrack(123, 10); + + PyThread_type_lock lock = (PyThread_type_lock)data; + PyThread_release_lock(lock); +} + +// gh-128679: Test fix for tracemalloc.stop() race condition +static PyObject * +tracemalloc_track_race(PyObject *self, PyObject *args) +{ +#define NTHREAD 50 + PyObject *tracemalloc = NULL; + PyObject *stop = NULL; + PyThread_type_lock locks[NTHREAD]; + memset(locks, 0, sizeof(locks)); + + // Call tracemalloc.start() + tracemalloc = PyImport_ImportModule("tracemalloc"); + if (tracemalloc == NULL) { + goto error; + } + PyObject *start = PyObject_GetAttrString(tracemalloc, "start"); + if (start == NULL) { + goto error; + } + PyObject *res = PyObject_CallNoArgs(start); + Py_DECREF(start); + if (res == NULL) { + goto error; + } + Py_DECREF(res); + + stop = PyObject_GetAttrString(tracemalloc, "stop"); + Py_CLEAR(tracemalloc); + if (stop == NULL) { + goto error; + } + + // Start threads + for (size_t i = 0; i < NTHREAD; i++) { + PyThread_type_lock lock = PyThread_allocate_lock(); + if (!lock) { + PyErr_NoMemory(); + goto error; + } + locks[i] = lock; + PyThread_acquire_lock(lock, 1); + + unsigned long thread; + thread = PyThread_start_new_thread(tracemalloc_track_race_thread, + (void*)lock); + if (thread == (unsigned long)-1) { + PyErr_SetString(PyExc_RuntimeError, "can't start new thread"); + goto error; + } + } + + // Call tracemalloc.stop() while threads are running + res = PyObject_CallNoArgs(stop); + Py_CLEAR(stop); + if (res == NULL) { + goto error; + 
} + Py_DECREF(res); + + // Wait until threads complete with the GIL released + Py_BEGIN_ALLOW_THREADS + for (size_t i = 0; i < NTHREAD; i++) { + PyThread_type_lock lock = locks[i]; + PyThread_acquire_lock(lock, 1); + PyThread_release_lock(lock); + } + Py_END_ALLOW_THREADS + + // Free threads locks + for (size_t i=0; i < NTHREAD; i++) { + PyThread_type_lock lock = locks[i]; + PyThread_free_lock(lock); + } + Py_RETURN_NONE; + +error: + Py_CLEAR(tracemalloc); + Py_CLEAR(stop); + for (size_t i=0; i < NTHREAD; i++) { + PyThread_type_lock lock = locks[i]; + if (lock) { + PyThread_free_lock(lock); + } + } + return NULL; +#undef NTHREAD +} + + static PyMethodDef test_methods[] = { {"pymem_api_misuse", pymem_api_misuse, METH_NOARGS}, {"pymem_buffer_overflow", pymem_buffer_overflow, METH_NOARGS}, @@ -593,6 +702,7 @@ static PyMethodDef test_methods[] = { // Tracemalloc tests {"tracemalloc_track", tracemalloc_track, METH_VARARGS}, {"tracemalloc_untrack", tracemalloc_untrack, METH_VARARGS}, + {"tracemalloc_track_race", tracemalloc_track_race, METH_NOARGS}, {NULL}, }; diff --git a/Modules/_testcapi/object.c b/Modules/_testcapi/object.c index 3af5429ef00985..2d538627d213fd 100644 --- a/Modules/_testcapi/object.c +++ b/Modules/_testcapi/object.c @@ -15,7 +15,7 @@ call_pyobject_print(PyObject *self, PyObject * args) return NULL; } - fp = _Py_fopen_obj(filename, "w+"); + fp = Py_fopen(filename, "w+"); if (Py_IsTrue(print_raw)) { flags = Py_PRINT_RAW; @@ -41,7 +41,7 @@ pyobject_print_null(PyObject *self, PyObject *args) return NULL; } - fp = _Py_fopen_obj(filename, "w+"); + fp = Py_fopen(filename, "w+"); if (PyObject_Print(NULL, fp, 0) < 0) { fclose(fp); @@ -72,7 +72,7 @@ pyobject_print_noref_object(PyObject *self, PyObject *args) return NULL; } - fp = _Py_fopen_obj(filename, "w+"); + fp = Py_fopen(filename, "w+"); if (PyObject_Print(test_string, fp, 0) < 0){ fclose(fp); @@ -103,7 +103,7 @@ pyobject_print_os_error(PyObject *self, PyObject *args) } // open file in read mode to induce OSError - fp = _Py_fopen_obj(filename, "r"); + fp = Py_fopen(filename, "r"); if (PyObject_Print(test_string, fp, 0) < 0) { fclose(fp); @@ -131,6 +131,346 @@ pyobject_enable_deferred_refcount(PyObject *self, PyObject *obj) return PyLong_FromLong(result); } +static int MyObject_dealloc_called = 0; + +static void +MyObject_dealloc(PyObject *op) +{ + // PyUnstable_TryIncRef should return 0 if object is being deallocated + assert(Py_REFCNT(op) == 0); + assert(!PyUnstable_TryIncRef(op)); + assert(Py_REFCNT(op) == 0); + + MyObject_dealloc_called++; + Py_TYPE(op)->tp_free(op); +} + +static PyTypeObject MyType = { + PyVarObject_HEAD_INIT(NULL, 0) + .tp_name = "MyType", + .tp_basicsize = sizeof(PyObject), + .tp_dealloc = MyObject_dealloc, +}; + +static PyObject * +test_py_try_inc_ref(PyObject *self, PyObject *unused) +{ + if (PyType_Ready(&MyType) < 0) { + return NULL; + } + + MyObject_dealloc_called = 0; + + PyObject *op = PyObject_New(PyObject, &MyType); + if (op == NULL) { + return NULL; + } + + PyUnstable_EnableTryIncRef(op); +#ifdef Py_GIL_DISABLED + // PyUnstable_EnableTryIncRef sets the shared flags to + // `_Py_REF_MAYBE_WEAKREF` if the flags are currently zero to ensure that + // the shared reference count is merged on deallocation. 
+ assert((op->ob_ref_shared & _Py_REF_SHARED_FLAG_MASK) >= _Py_REF_MAYBE_WEAKREF); +#endif + + if (!PyUnstable_TryIncRef(op)) { + PyErr_SetString(PyExc_AssertionError, "PyUnstable_TryIncRef failed"); + Py_DECREF(op); + return NULL; + } + Py_DECREF(op); // undo try-incref + Py_DECREF(op); // dealloc + assert(MyObject_dealloc_called == 1); + Py_RETURN_NONE; +} + + +static PyObject * +_test_incref(PyObject *ob) +{ + return Py_NewRef(ob); +} + +static PyObject * +test_xincref_doesnt_leak(PyObject *ob, PyObject *Py_UNUSED(ignored)) +{ + PyObject *obj = PyLong_FromLong(0); + Py_XINCREF(_test_incref(obj)); + Py_DECREF(obj); + Py_DECREF(obj); + Py_DECREF(obj); + Py_RETURN_NONE; +} + + +static PyObject * +test_incref_doesnt_leak(PyObject *ob, PyObject *Py_UNUSED(ignored)) +{ + PyObject *obj = PyLong_FromLong(0); + Py_INCREF(_test_incref(obj)); + Py_DECREF(obj); + Py_DECREF(obj); + Py_DECREF(obj); + Py_RETURN_NONE; +} + + +static PyObject * +test_xdecref_doesnt_leak(PyObject *ob, PyObject *Py_UNUSED(ignored)) +{ + Py_XDECREF(PyLong_FromLong(0)); + Py_RETURN_NONE; +} + + +static PyObject * +test_decref_doesnt_leak(PyObject *ob, PyObject *Py_UNUSED(ignored)) +{ + Py_DECREF(PyLong_FromLong(0)); + Py_RETURN_NONE; +} + + +static PyObject * +test_incref_decref_API(PyObject *ob, PyObject *Py_UNUSED(ignored)) +{ + PyObject *obj = PyLong_FromLong(0); + Py_IncRef(obj); + Py_DecRef(obj); + Py_DecRef(obj); + Py_RETURN_NONE; +} + + +#ifdef Py_REF_DEBUG +static PyObject * +negative_refcount(PyObject *self, PyObject *Py_UNUSED(args)) +{ + PyObject *obj = PyUnicode_FromString("negative_refcount"); + if (obj == NULL) { + return NULL; + } + assert(Py_REFCNT(obj) == 1); + + Py_SET_REFCNT(obj, 0); + /* Py_DECREF() must call _Py_NegativeRefcount() and abort Python */ + Py_DECREF(obj); + + Py_RETURN_NONE; +} + + +static PyObject * +decref_freed_object(PyObject *self, PyObject *Py_UNUSED(args)) +{ + PyObject *obj = PyUnicode_FromString("decref_freed_object"); + if (obj == NULL) { + return NULL; + } + assert(Py_REFCNT(obj) == 1); + + // Deallocate the memory + Py_DECREF(obj); + // obj is a now a dangling pointer + + // gh-109496: If Python is built in debug mode, Py_DECREF() must call + // _Py_NegativeRefcount() and abort Python. 
+ Py_DECREF(obj); + + Py_RETURN_NONE; +} +#endif + + +// Test Py_CLEAR() macro +static PyObject* +test_py_clear(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + // simple case with a variable + PyObject *obj = PyList_New(0); + if (obj == NULL) { + return NULL; + } + Py_CLEAR(obj); + assert(obj == NULL); + + // gh-98724: complex case, Py_CLEAR() argument has a side effect + PyObject* array[1]; + array[0] = PyList_New(0); + if (array[0] == NULL) { + return NULL; + } + + PyObject **p = array; + Py_CLEAR(*p++); + assert(array[0] == NULL); + assert(p == array + 1); + + Py_RETURN_NONE; +} + + +// Test Py_SETREF() and Py_XSETREF() macros, similar to test_py_clear() +static PyObject* +test_py_setref(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + // Py_SETREF() simple case with a variable + PyObject *obj = PyList_New(0); + if (obj == NULL) { + return NULL; + } + Py_SETREF(obj, NULL); + assert(obj == NULL); + + // Py_XSETREF() simple case with a variable + PyObject *obj2 = PyList_New(0); + if (obj2 == NULL) { + return NULL; + } + Py_XSETREF(obj2, NULL); + assert(obj2 == NULL); + // test Py_XSETREF() when the argument is NULL + Py_XSETREF(obj2, NULL); + assert(obj2 == NULL); + + // gh-98724: complex case, Py_SETREF() argument has a side effect + PyObject* array[1]; + array[0] = PyList_New(0); + if (array[0] == NULL) { + return NULL; + } + + PyObject **p = array; + Py_SETREF(*p++, NULL); + assert(array[0] == NULL); + assert(p == array + 1); + + // gh-98724: complex case, Py_XSETREF() argument has a side effect + PyObject* array2[1]; + array2[0] = PyList_New(0); + if (array2[0] == NULL) { + return NULL; + } + + PyObject **p2 = array2; + Py_XSETREF(*p2++, NULL); + assert(array2[0] == NULL); + assert(p2 == array2 + 1); + + // test Py_XSETREF() when the argument is NULL + p2 = array2; + Py_XSETREF(*p2++, NULL); + assert(array2[0] == NULL); + assert(p2 == array2 + 1); + + Py_RETURN_NONE; +} + + +#define TEST_REFCOUNT() \ + do { \ + PyObject *obj = PyList_New(0); \ + if (obj == NULL) { \ + return NULL; \ + } \ + assert(Py_REFCNT(obj) == 1); \ + \ + /* test Py_NewRef() */ \ + PyObject *ref = Py_NewRef(obj); \ + assert(ref == obj); \ + assert(Py_REFCNT(obj) == 2); \ + Py_DECREF(ref); \ + \ + /* test Py_XNewRef() */ \ + PyObject *xref = Py_XNewRef(obj); \ + assert(xref == obj); \ + assert(Py_REFCNT(obj) == 2); \ + Py_DECREF(xref); \ + \ + assert(Py_XNewRef(NULL) == NULL); \ + \ + Py_DECREF(obj); \ + Py_RETURN_NONE; \ + } while (0) + + +// Test Py_NewRef() and Py_XNewRef() macros +static PyObject* +test_refcount_macros(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + TEST_REFCOUNT(); +} + +#undef Py_NewRef +#undef Py_XNewRef + +// Test Py_NewRef() and Py_XNewRef() functions, after undefining macros. 
+static PyObject* +test_refcount_funcs(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + TEST_REFCOUNT(); +} + + +// Test Py_Is() function +#define TEST_PY_IS() \ + do { \ + PyObject *o_none = Py_None; \ + PyObject *o_true = Py_True; \ + PyObject *o_false = Py_False; \ + PyObject *obj = PyList_New(0); \ + if (obj == NULL) { \ + return NULL; \ + } \ + \ + /* test Py_Is() */ \ + assert(Py_Is(obj, obj)); \ + assert(!Py_Is(obj, o_none)); \ + \ + /* test Py_None */ \ + assert(Py_Is(o_none, o_none)); \ + assert(!Py_Is(obj, o_none)); \ + \ + /* test Py_True */ \ + assert(Py_Is(o_true, o_true)); \ + assert(!Py_Is(o_false, o_true)); \ + assert(!Py_Is(obj, o_true)); \ + \ + /* test Py_False */ \ + assert(Py_Is(o_false, o_false)); \ + assert(!Py_Is(o_true, o_false)); \ + assert(!Py_Is(obj, o_false)); \ + \ + Py_DECREF(obj); \ + Py_RETURN_NONE; \ + } while (0) + +// Test Py_Is() macro +static PyObject* +test_py_is_macros(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + TEST_PY_IS(); +} + +#undef Py_Is + +// Test Py_Is() function, after undefining its macro. +static PyObject* +test_py_is_funcs(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + TEST_PY_IS(); +} + + +static PyObject * +clear_managed_dict(PyObject *self, PyObject *obj) +{ + PyObject_ClearManagedDict(obj); + Py_RETURN_NONE; +} + + static PyMethodDef test_methods[] = { {"call_pyobject_print", call_pyobject_print, METH_VARARGS}, {"pyobject_print_null", pyobject_print_null, METH_VARARGS}, @@ -138,15 +478,28 @@ static PyMethodDef test_methods[] = { {"pyobject_print_os_error", pyobject_print_os_error, METH_VARARGS}, {"pyobject_clear_weakrefs_no_callbacks", pyobject_clear_weakrefs_no_callbacks, METH_O}, {"pyobject_enable_deferred_refcount", pyobject_enable_deferred_refcount, METH_O}, + {"test_py_try_inc_ref", test_py_try_inc_ref, METH_NOARGS}, + {"test_xincref_doesnt_leak",test_xincref_doesnt_leak, METH_NOARGS}, + {"test_incref_doesnt_leak", test_incref_doesnt_leak, METH_NOARGS}, + {"test_xdecref_doesnt_leak",test_xdecref_doesnt_leak, METH_NOARGS}, + {"test_decref_doesnt_leak", test_decref_doesnt_leak, METH_NOARGS}, + {"test_incref_decref_API", test_incref_decref_API, METH_NOARGS}, +#ifdef Py_REF_DEBUG + {"negative_refcount", negative_refcount, METH_NOARGS}, + {"decref_freed_object", decref_freed_object, METH_NOARGS}, +#endif + {"test_py_clear", test_py_clear, METH_NOARGS}, + {"test_py_setref", test_py_setref, METH_NOARGS}, + {"test_refcount_macros", test_refcount_macros, METH_NOARGS}, + {"test_refcount_funcs", test_refcount_funcs, METH_NOARGS}, + {"test_py_is_macros", test_py_is_macros, METH_NOARGS}, + {"test_py_is_funcs", test_py_is_funcs, METH_NOARGS}, + {"clear_managed_dict", clear_managed_dict, METH_O, NULL}, {NULL}, }; int _PyTestCapi_Init_Object(PyObject *m) { - if (PyModule_AddFunctions(m, test_methods) < 0) { - return -1; - } - - return 0; + return PyModule_AddFunctions(m, test_methods); } diff --git a/Modules/_testcapi/parts.h b/Modules/_testcapi/parts.h index 65ba77596c760e..af6400162daf2b 100644 --- a/Modules/_testcapi/parts.h +++ b/Modules/_testcapi/parts.h @@ -61,5 +61,9 @@ int _PyTestCapi_Init_Time(PyObject *module); int _PyTestCapi_Init_Monitoring(PyObject *module); int _PyTestCapi_Init_Object(PyObject *module); int _PyTestCapi_Init_Config(PyObject *mod); +int _PyTestCapi_Init_Import(PyObject *mod); +int _PyTestCapi_Init_Frame(PyObject *mod); +int _PyTestCapi_Init_Type(PyObject *mod); +int _PyTestCapi_Init_Function(PyObject *mod); #endif // Py_TESTCAPI_PARTS_H diff --git a/Modules/_testcapi/set.c b/Modules/_testcapi/set.c 
index 31b52cee5e9623..092715ab7d0aa4 100644 --- a/Modules/_testcapi/set.c +++ b/Modules/_testcapi/set.c @@ -8,18 +8,37 @@ set_get_size(PyObject *self, PyObject *obj) RETURN_SIZE(PySet_GET_SIZE(obj)); } + +static PyObject* +test_set_type_size(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + PyObject *obj = PyList_New(0); + if (obj == NULL) { + return NULL; + } + + // Ensure that following tests don't modify the object, + // to ensure that Py_DECREF() will not crash. + assert(Py_TYPE(obj) == &PyList_Type); + assert(Py_SIZE(obj) == 0); + + // bpo-39573: Test Py_SET_TYPE() and Py_SET_SIZE() functions. + Py_SET_TYPE(obj, &PyList_Type); + Py_SET_SIZE(obj, 0); + + Py_DECREF(obj); + Py_RETURN_NONE; +} + + static PyMethodDef test_methods[] = { {"set_get_size", set_get_size, METH_O}, - + {"test_set_type_size", test_set_type_size, METH_NOARGS}, {NULL}, }; int _PyTestCapi_Init_Set(PyObject *m) { - if (PyModule_AddFunctions(m, test_methods) < 0) { - return -1; - } - - return 0; + return PyModule_AddFunctions(m, test_methods); } diff --git a/Modules/_testcapi/type.c b/Modules/_testcapi/type.c new file mode 100644 index 00000000000000..9bef58d1f83668 --- /dev/null +++ b/Modules/_testcapi/type.c @@ -0,0 +1,251 @@ +#include "parts.h" +#include "util.h" + + +static PyType_Slot HeapTypeNameType_slots[] = { + {0}, +}; + +static PyType_Spec HeapTypeNameType_Spec = { + .name = "_testcapi.HeapTypeNameType", + .basicsize = sizeof(PyObject), + .flags = Py_TPFLAGS_DEFAULT, + .slots = HeapTypeNameType_slots, +}; + +static PyObject * +get_heaptype_for_name(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + return PyType_FromSpec(&HeapTypeNameType_Spec); +} + + +static PyObject * +get_type_name(PyObject *self, PyObject *type) +{ + assert(PyType_Check(type)); + return PyType_GetName((PyTypeObject *)type); +} + + +static PyObject * +get_type_qualname(PyObject *self, PyObject *type) +{ + assert(PyType_Check(type)); + return PyType_GetQualName((PyTypeObject *)type); +} + + +static PyObject * +get_type_fullyqualname(PyObject *self, PyObject *type) +{ + assert(PyType_Check(type)); + return PyType_GetFullyQualifiedName((PyTypeObject *)type); +} + + +static PyObject * +get_type_module_name(PyObject *self, PyObject *type) +{ + assert(PyType_Check(type)); + return PyType_GetModuleName((PyTypeObject *)type); +} + + +static PyObject * +test_get_type_dict(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + /* Test for PyType_GetDict */ + + // Assert ints have a `to_bytes` method + PyObject *long_dict = PyType_GetDict(&PyLong_Type); + assert(long_dict); + assert(PyDict_GetItemString(long_dict, "to_bytes")); // borrowed ref + Py_DECREF(long_dict); + + // Make a new type, add an attribute to it and assert it's there + PyObject *HeapTypeNameType = PyType_FromSpec(&HeapTypeNameType_Spec); + assert(HeapTypeNameType); + assert(PyObject_SetAttrString( + HeapTypeNameType, "new_attr", Py_NewRef(Py_None)) >= 0); + PyObject *type_dict = PyType_GetDict((PyTypeObject*)HeapTypeNameType); + assert(type_dict); + assert(PyDict_GetItemString(type_dict, "new_attr")); // borrowed ref + Py_DECREF(HeapTypeNameType); + Py_DECREF(type_dict); + Py_RETURN_NONE; +} + + +static PyObject * +test_get_statictype_slots(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + newfunc tp_new = PyType_GetSlot(&PyLong_Type, Py_tp_new); + if (PyLong_Type.tp_new != tp_new) { + PyErr_SetString(PyExc_AssertionError, "mismatch: tp_new of long"); + return NULL; + } + + reprfunc tp_repr = PyType_GetSlot(&PyLong_Type, Py_tp_repr); + if (PyLong_Type.tp_repr != tp_repr) { + 
PyErr_SetString(PyExc_AssertionError, "mismatch: tp_repr of long"); + return NULL; + } + + ternaryfunc tp_call = PyType_GetSlot(&PyLong_Type, Py_tp_call); + if (tp_call != NULL) { + PyErr_SetString(PyExc_AssertionError, "mismatch: tp_call of long"); + return NULL; + } + + binaryfunc nb_add = PyType_GetSlot(&PyLong_Type, Py_nb_add); + if (PyLong_Type.tp_as_number->nb_add != nb_add) { + PyErr_SetString(PyExc_AssertionError, "mismatch: nb_add of long"); + return NULL; + } + + lenfunc mp_length = PyType_GetSlot(&PyLong_Type, Py_mp_length); + if (mp_length != NULL) { + PyErr_SetString(PyExc_AssertionError, "mismatch: mp_length of long"); + return NULL; + } + + void *over_value = PyType_GetSlot(&PyLong_Type, Py_bf_releasebuffer + 1); + if (over_value != NULL) { + PyErr_SetString(PyExc_AssertionError, "mismatch: max+1 of long"); + return NULL; + } + + tp_new = PyType_GetSlot(&PyLong_Type, 0); + if (tp_new != NULL) { + PyErr_SetString(PyExc_AssertionError, "mismatch: slot 0 of long"); + return NULL; + } + if (PyErr_ExceptionMatches(PyExc_SystemError)) { + // This is the right exception + PyErr_Clear(); + } + else { + return NULL; + } + + Py_RETURN_NONE; +} + + +// Get type->tp_version_tag +static PyObject * +type_get_version(PyObject *self, PyObject *type) +{ + if (!PyType_Check(type)) { + PyErr_SetString(PyExc_TypeError, "argument must be a type"); + return NULL; + } + PyObject *res = PyLong_FromUnsignedLong( + ((PyTypeObject *)type)->tp_version_tag); + if (res == NULL) { + assert(PyErr_Occurred()); + return NULL; + } + return res; +} + +static PyObject * +type_modified(PyObject *self, PyObject *arg) +{ + if (!PyType_Check(arg)) { + PyErr_SetString(PyExc_TypeError, "argument must be a type"); + return NULL; + } + PyTypeObject *type = (PyTypeObject*)arg; + + PyType_Modified(type); + Py_RETURN_NONE; +} + + +static PyObject * +type_assign_version(PyObject *self, PyObject *arg) +{ + if (!PyType_Check(arg)) { + PyErr_SetString(PyExc_TypeError, "argument must be a type"); + return NULL; + } + PyTypeObject *type = (PyTypeObject*)arg; + + int res = PyUnstable_Type_AssignVersionTag(type); + return PyLong_FromLong(res); +} + + +static PyObject * +type_get_tp_bases(PyObject *self, PyObject *arg) +{ + if (!PyType_Check(arg)) { + PyErr_SetString(PyExc_TypeError, "argument must be a type"); + return NULL; + } + PyTypeObject *type = (PyTypeObject*)arg; + + PyObject *bases = type->tp_bases; + if (bases == NULL) { + Py_RETURN_NONE; + } + return Py_NewRef(bases); +} + +static PyObject * +type_get_tp_mro(PyObject *self, PyObject *arg) +{ + if (!PyType_Check(arg)) { + PyErr_SetString(PyExc_TypeError, "argument must be a type"); + return NULL; + } + PyTypeObject *type = (PyTypeObject*)arg; + + PyObject *mro = ((PyTypeObject *)type)->tp_mro; + if (mro == NULL) { + Py_RETURN_NONE; + } + return Py_NewRef(mro); +} + + +static PyObject * +type_freeze(PyObject *module, PyObject *arg) +{ + if (!PyType_Check(arg)) { + PyErr_SetString(PyExc_TypeError, "argument must be a type"); + return NULL; + } + PyTypeObject *type = (PyTypeObject*)arg; + + if (PyType_Freeze(type) < 0) { + return NULL; + } + Py_RETURN_NONE; +} + + +static PyMethodDef test_methods[] = { + {"get_heaptype_for_name", get_heaptype_for_name, METH_NOARGS}, + {"get_type_name", get_type_name, METH_O}, + {"get_type_qualname", get_type_qualname, METH_O}, + {"get_type_fullyqualname", get_type_fullyqualname, METH_O}, + {"get_type_module_name", get_type_module_name, METH_O}, + {"test_get_type_dict", test_get_type_dict, METH_NOARGS}, + {"test_get_statictype_slots", 
test_get_statictype_slots, METH_NOARGS}, + {"type_get_version", type_get_version, METH_O, PyDoc_STR("type->tp_version_tag")}, + {"type_modified", type_modified, METH_O, PyDoc_STR("PyType_Modified")}, + {"type_assign_version", type_assign_version, METH_O, PyDoc_STR("PyUnstable_Type_AssignVersionTag")}, + {"type_get_tp_bases", type_get_tp_bases, METH_O}, + {"type_get_tp_mro", type_get_tp_mro, METH_O}, + {"type_freeze", type_freeze, METH_O}, + {NULL}, +}; + +int +_PyTestCapi_Init_Type(PyObject *m) +{ + return PyModule_AddFunctions(m, test_methods); +} diff --git a/Modules/_testcapi/watchers.c b/Modules/_testcapi/watchers.c index 321d3aeffb6ad1..f7440769b9594e 100644 --- a/Modules/_testcapi/watchers.c +++ b/Modules/_testcapi/watchers.c @@ -428,7 +428,8 @@ allocate_too_many_code_watchers(PyObject *self, PyObject *args) PyObject *exc = PyErr_GetRaisedException(); for (int i = 0; i < num_watchers; i++) { if (PyCode_ClearWatcher(watcher_ids[i]) < 0) { - PyErr_WriteUnraisable(Py_None); + PyErr_FormatUnraisable("Exception ignored while " + "clearing code watcher"); break; } } @@ -609,7 +610,8 @@ allocate_too_many_func_watchers(PyObject *self, PyObject *args) PyObject *exc = PyErr_GetRaisedException(); for (int i = 0; i < num_watchers; i++) { if (PyFunction_ClearWatcher(watcher_ids[i]) < 0) { - PyErr_WriteUnraisable(Py_None); + PyErr_FormatUnraisable("Exception ignored while " + "clearing function watcher"); break; } } @@ -755,7 +757,8 @@ allocate_too_many_context_watchers(PyObject *self, PyObject *args) PyObject *exc = PyErr_GetRaisedException(); for (int i = 0; i < num_watchers; i++) { if (PyContext_ClearWatcher(watcher_ids[i]) < 0) { - PyErr_WriteUnraisable(Py_None); + PyErr_FormatUnraisable("Exception ignored while " + "clearing context watcher"); break; } } diff --git a/Modules/_testcapimodule.c b/Modules/_testcapimodule.c index f737250ac29d57..09e74fd3cf20af 100644 --- a/Modules/_testcapimodule.c +++ b/Modules/_testcapimodule.c @@ -163,124 +163,6 @@ test_sizeof_c_types(PyObject *self, PyObject *Py_UNUSED(ignored)) #endif } -static PyObject* -test_list_api(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - PyObject* list; - int i; - - /* SF bug 132008: PyList_Reverse segfaults */ -#define NLIST 30 - list = PyList_New(NLIST); - if (list == (PyObject*)NULL) - return (PyObject*)NULL; - /* list = range(NLIST) */ - for (i = 0; i < NLIST; ++i) { - PyObject* anint = PyLong_FromLong(i); - if (anint == (PyObject*)NULL) { - Py_DECREF(list); - return (PyObject*)NULL; - } - PyList_SET_ITEM(list, i, anint); - } - /* list.reverse(), via PyList_Reverse() */ - i = PyList_Reverse(list); /* should not blow up! 
*/ - if (i != 0) { - Py_DECREF(list); - return (PyObject*)NULL; - } - /* Check that list == range(29, -1, -1) now */ - for (i = 0; i < NLIST; ++i) { - PyObject* anint = PyList_GET_ITEM(list, i); - if (PyLong_AS_LONG(anint) != NLIST-1-i) { - PyErr_SetString(get_testerror(self), - "test_list_api: reverse screwed up"); - Py_DECREF(list); - return (PyObject*)NULL; - } - } - Py_DECREF(list); -#undef NLIST - - Py_RETURN_NONE; -} - -static int -test_dict_inner(PyObject *self, int count) -{ - Py_ssize_t pos = 0, iterations = 0; - int i; - PyObject *dict = PyDict_New(); - PyObject *v, *k; - - if (dict == NULL) - return -1; - - for (i = 0; i < count; i++) { - v = PyLong_FromLong(i); - if (v == NULL) { - goto error; - } - if (PyDict_SetItem(dict, v, v) < 0) { - Py_DECREF(v); - goto error; - } - Py_DECREF(v); - } - - k = v = UNINITIALIZED_PTR; - while (PyDict_Next(dict, &pos, &k, &v)) { - PyObject *o; - iterations++; - - assert(k != UNINITIALIZED_PTR); - assert(v != UNINITIALIZED_PTR); - i = PyLong_AS_LONG(v) + 1; - o = PyLong_FromLong(i); - if (o == NULL) { - goto error; - } - if (PyDict_SetItem(dict, k, o) < 0) { - Py_DECREF(o); - goto error; - } - Py_DECREF(o); - k = v = UNINITIALIZED_PTR; - } - assert(k == UNINITIALIZED_PTR); - assert(v == UNINITIALIZED_PTR); - - Py_DECREF(dict); - - if (iterations != count) { - PyErr_SetString( - get_testerror(self), - "test_dict_iteration: dict iteration went wrong "); - return -1; - } else { - return 0; - } -error: - Py_DECREF(dict); - return -1; -} - - - -static PyObject* -test_dict_iteration(PyObject* self, PyObject *Py_UNUSED(ignored)) -{ - int i; - - for (i = 0; i < 200; i++) { - if (test_dict_inner(self, i) < 0) { - return NULL; - } - } - - Py_RETURN_NONE; -} - /* Issue #4701: Check that PyObject_Hash implicitly calls * PyType_Ready if it hasn't already been called */ @@ -530,136 +412,6 @@ test_buildvalue_N(PyObject *self, PyObject *Py_UNUSED(ignored)) } -static PyObject * -test_get_statictype_slots(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - newfunc tp_new = PyType_GetSlot(&PyLong_Type, Py_tp_new); - if (PyLong_Type.tp_new != tp_new) { - PyErr_SetString(PyExc_AssertionError, "mismatch: tp_new of long"); - return NULL; - } - - reprfunc tp_repr = PyType_GetSlot(&PyLong_Type, Py_tp_repr); - if (PyLong_Type.tp_repr != tp_repr) { - PyErr_SetString(PyExc_AssertionError, "mismatch: tp_repr of long"); - return NULL; - } - - ternaryfunc tp_call = PyType_GetSlot(&PyLong_Type, Py_tp_call); - if (tp_call != NULL) { - PyErr_SetString(PyExc_AssertionError, "mismatch: tp_call of long"); - return NULL; - } - - binaryfunc nb_add = PyType_GetSlot(&PyLong_Type, Py_nb_add); - if (PyLong_Type.tp_as_number->nb_add != nb_add) { - PyErr_SetString(PyExc_AssertionError, "mismatch: nb_add of long"); - return NULL; - } - - lenfunc mp_length = PyType_GetSlot(&PyLong_Type, Py_mp_length); - if (mp_length != NULL) { - PyErr_SetString(PyExc_AssertionError, "mismatch: mp_length of long"); - return NULL; - } - - void *over_value = PyType_GetSlot(&PyLong_Type, Py_bf_releasebuffer + 1); - if (over_value != NULL) { - PyErr_SetString(PyExc_AssertionError, "mismatch: max+1 of long"); - return NULL; - } - - tp_new = PyType_GetSlot(&PyLong_Type, 0); - if (tp_new != NULL) { - PyErr_SetString(PyExc_AssertionError, "mismatch: slot 0 of long"); - return NULL; - } - if (PyErr_ExceptionMatches(PyExc_SystemError)) { - // This is the right exception - PyErr_Clear(); - } - else { - return NULL; - } - - Py_RETURN_NONE; -} - - -static PyType_Slot HeapTypeNameType_slots[] = { - {0}, -}; - -static 
PyType_Spec HeapTypeNameType_Spec = { - .name = "_testcapi.HeapTypeNameType", - .basicsize = sizeof(PyObject), - .flags = Py_TPFLAGS_DEFAULT, - .slots = HeapTypeNameType_slots, -}; - -static PyObject * -get_heaptype_for_name(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - return PyType_FromSpec(&HeapTypeNameType_Spec); -} - - -static PyObject * -get_type_name(PyObject *self, PyObject *type) -{ - assert(PyType_Check(type)); - return PyType_GetName((PyTypeObject *)type); -} - - -static PyObject * -get_type_qualname(PyObject *self, PyObject *type) -{ - assert(PyType_Check(type)); - return PyType_GetQualName((PyTypeObject *)type); -} - - -static PyObject * -get_type_fullyqualname(PyObject *self, PyObject *type) -{ - assert(PyType_Check(type)); - return PyType_GetFullyQualifiedName((PyTypeObject *)type); -} - - -static PyObject * -get_type_module_name(PyObject *self, PyObject *type) -{ - assert(PyType_Check(type)); - return PyType_GetModuleName((PyTypeObject *)type); -} - - -static PyObject * -test_get_type_dict(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - /* Test for PyType_GetDict */ - - // Assert ints have a `to_bytes` method - PyObject *long_dict = PyType_GetDict(&PyLong_Type); - assert(long_dict); - assert(PyDict_GetItemString(long_dict, "to_bytes")); // borrowed ref - Py_DECREF(long_dict); - - // Make a new type, add an attribute to it and assert it's there - PyObject *HeapTypeNameType = PyType_FromSpec(&HeapTypeNameType_Spec); - assert(HeapTypeNameType); - assert(PyObject_SetAttrString( - HeapTypeNameType, "new_attr", Py_NewRef(Py_None)) >= 0); - PyObject *type_dict = PyType_GetDict((PyTypeObject*)HeapTypeNameType); - assert(type_dict); - assert(PyDict_GetItemString(type_dict, "new_attr")); // borrowed ref - Py_DECREF(HeapTypeNameType); - Py_DECREF(type_dict); - Py_RETURN_NONE; -} - static PyObject * pyobject_repr_from_null(PyObject *self, PyObject *Py_UNUSED(ignored)) { @@ -885,61 +637,6 @@ pending_threadfunc(PyObject *self, PyObject *arg, PyObject *kwargs) return PyLong_FromUnsignedLong((unsigned long)num_added); } -/* Test PyOS_string_to_double. */ -static PyObject * -test_string_to_double(PyObject *self, PyObject *Py_UNUSED(ignored)) { - double result; - const char *msg; - -#define CHECK_STRING(STR, expected) \ - do { \ - result = PyOS_string_to_double(STR, NULL, NULL); \ - if (result == -1.0 && PyErr_Occurred()) { \ - return NULL; \ - } \ - if (result != (double)expected) { \ - msg = "conversion of " STR " to float failed"; \ - goto fail; \ - } \ - } while (0) - -#define CHECK_INVALID(STR) \ - do { \ - result = PyOS_string_to_double(STR, NULL, NULL); \ - if (result == -1.0 && PyErr_Occurred()) { \ - if (PyErr_ExceptionMatches(PyExc_ValueError)) { \ - PyErr_Clear(); \ - } \ - else { \ - return NULL; \ - } \ - } \ - else { \ - msg = "conversion of " STR " didn't raise ValueError"; \ - goto fail; \ - } \ - } while (0) - - CHECK_STRING("0.1", 0.1); - CHECK_STRING("1.234", 1.234); - CHECK_STRING("-1.35", -1.35); - CHECK_STRING(".1e01", 1.0); - CHECK_STRING("2.e-2", 0.02); - - CHECK_INVALID(" 0.1"); - CHECK_INVALID("\t\n-3"); - CHECK_INVALID(".123 "); - CHECK_INVALID("3\n"); - CHECK_INVALID("123abc"); - - Py_RETURN_NONE; - fail: - return raiseTestError(self, "test_string_to_double", msg); -#undef CHECK_STRING -#undef CHECK_INVALID -} - - /* Coverage testing of capsule objects. 
*/ static const char *capsule_name = "capsule name"; @@ -1521,48 +1218,6 @@ static PyMethodDef ml = { NULL }; -static PyObject * -_test_incref(PyObject *ob) -{ - return Py_NewRef(ob); -} - -static PyObject * -test_xincref_doesnt_leak(PyObject *ob, PyObject *Py_UNUSED(ignored)) -{ - PyObject *obj = PyLong_FromLong(0); - Py_XINCREF(_test_incref(obj)); - Py_DECREF(obj); - Py_DECREF(obj); - Py_DECREF(obj); - Py_RETURN_NONE; -} - -static PyObject * -test_incref_doesnt_leak(PyObject *ob, PyObject *Py_UNUSED(ignored)) -{ - PyObject *obj = PyLong_FromLong(0); - Py_INCREF(_test_incref(obj)); - Py_DECREF(obj); - Py_DECREF(obj); - Py_DECREF(obj); - Py_RETURN_NONE; -} - -static PyObject * -test_xdecref_doesnt_leak(PyObject *ob, PyObject *Py_UNUSED(ignored)) -{ - Py_XDECREF(PyLong_FromLong(0)); - Py_RETURN_NONE; -} - -static PyObject * -test_decref_doesnt_leak(PyObject *ob, PyObject *Py_UNUSED(ignored)) -{ - Py_DECREF(PyLong_FromLong(0)); - Py_RETURN_NONE; -} - static PyObject * test_structseq_newtype_doesnt_leak(PyObject *Py_UNUSED(self), PyObject *Py_UNUSED(args)) @@ -1609,16 +1264,6 @@ test_structseq_newtype_null_descr_doc(PyObject *Py_UNUSED(self), Py_RETURN_NONE; } -static PyObject * -test_incref_decref_API(PyObject *ob, PyObject *Py_UNUSED(ignored)) -{ - PyObject *obj = PyLong_FromLong(0); - Py_IncRef(obj); - Py_DecRef(obj); - Py_DecRef(obj); - Py_RETURN_NONE; -} - typedef struct { PyThread_type_lock start_event; PyThread_type_lock exit_event; @@ -1744,7 +1389,7 @@ pymarshal_write_long_to_file(PyObject* self, PyObject *args) &value, &filename, &version)) return NULL; - fp = _Py_fopen_obj(filename, "wb"); + fp = Py_fopen(filename, "wb"); if (fp == NULL) { PyErr_SetFromErrno(PyExc_OSError); return NULL; @@ -1769,7 +1414,7 @@ pymarshal_write_object_to_file(PyObject* self, PyObject *args) &obj, &filename, &version)) return NULL; - fp = _Py_fopen_obj(filename, "wb"); + fp = Py_fopen(filename, "wb"); if (fp == NULL) { PyErr_SetFromErrno(PyExc_OSError); return NULL; @@ -1793,7 +1438,7 @@ pymarshal_read_short_from_file(PyObject* self, PyObject *args) if (!PyArg_ParseTuple(args, "O:pymarshal_read_short_from_file", &filename)) return NULL; - fp = _Py_fopen_obj(filename, "rb"); + fp = Py_fopen(filename, "rb"); if (fp == NULL) { PyErr_SetFromErrno(PyExc_OSError); return NULL; @@ -1818,7 +1463,7 @@ pymarshal_read_long_from_file(PyObject* self, PyObject *args) if (!PyArg_ParseTuple(args, "O:pymarshal_read_long_from_file", &filename)) return NULL; - fp = _Py_fopen_obj(filename, "rb"); + fp = Py_fopen(filename, "rb"); if (fp == NULL) { PyErr_SetFromErrno(PyExc_OSError); return NULL; @@ -1840,7 +1485,7 @@ pymarshal_read_last_object_from_file(PyObject* self, PyObject *args) if (!PyArg_ParseTuple(args, "O:pymarshal_read_last_object_from_file", &filename)) return NULL; - FILE *fp = _Py_fopen_obj(filename, "rb"); + FILE *fp = Py_fopen(filename, "rb"); if (fp == NULL) { PyErr_SetFromErrno(PyExc_OSError); return NULL; @@ -1863,7 +1508,7 @@ pymarshal_read_object_from_file(PyObject* self, PyObject *args) if (!PyArg_ParseTuple(args, "O:pymarshal_read_object_from_file", &filename)) return NULL; - FILE *fp = _Py_fopen_obj(filename, "rb"); + FILE *fp = Py_fopen(filename, "rb"); if (fp == NULL) { PyErr_SetFromErrno(PyExc_OSError); return NULL; @@ -2036,45 +1681,6 @@ bad_get(PyObject *module, PyObject *args) } -#ifdef Py_REF_DEBUG -static PyObject * -negative_refcount(PyObject *self, PyObject *Py_UNUSED(args)) -{ - PyObject *obj = PyUnicode_FromString("negative_refcount"); - if (obj == NULL) { - return NULL; - } - 
assert(Py_REFCNT(obj) == 1); - - Py_SET_REFCNT(obj, 0); - /* Py_DECREF() must call _Py_NegativeRefcount() and abort Python */ - Py_DECREF(obj); - - Py_RETURN_NONE; -} - -static PyObject * -decref_freed_object(PyObject *self, PyObject *Py_UNUSED(args)) -{ - PyObject *obj = PyUnicode_FromString("decref_freed_object"); - if (obj == NULL) { - return NULL; - } - assert(Py_REFCNT(obj) == 1); - - // Deallocate the memory - Py_DECREF(obj); - // obj is a now a dangling pointer - - // gh-109496: If Python is built in debug mode, Py_DECREF() must call - // _Py_NegativeRefcount() and abort Python. - Py_DECREF(obj); - - Py_RETURN_NONE; -} -#endif - - /* Functions for testing C calling conventions (METH_*) are named meth_*, * e.g. "meth_varargs" for METH_VARARGS. * @@ -2178,314 +1784,50 @@ pynumber_tobase(PyObject *module, PyObject *args) return PyNumber_ToBase(obj, base); } -static PyObject* -test_set_type_size(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - PyObject *obj = PyList_New(0); - if (obj == NULL) { - return NULL; +/* We only use 2 in test_capi/test_misc.py. */ +#define NUM_BASIC_STATIC_TYPES 2 +static PyTypeObject BasicStaticTypes[NUM_BASIC_STATIC_TYPES] = { +#define INIT_BASIC_STATIC_TYPE \ + { \ + PyVarObject_HEAD_INIT(NULL, 0) \ + .tp_name = "BasicStaticType", \ + .tp_basicsize = sizeof(PyObject), \ } + INIT_BASIC_STATIC_TYPE, + INIT_BASIC_STATIC_TYPE, +#undef INIT_BASIC_STATIC_TYPE +}; +static int num_basic_static_types_used = 0; - // Ensure that following tests don't modify the object, - // to ensure that Py_DECREF() will not crash. - assert(Py_TYPE(obj) == &PyList_Type); - assert(Py_SIZE(obj) == 0); - - // bpo-39573: Test Py_SET_TYPE() and Py_SET_SIZE() functions. - Py_SET_TYPE(obj, &PyList_Type); - Py_SET_SIZE(obj, 0); - - Py_DECREF(obj); - Py_RETURN_NONE; -} - - -// Test Py_CLEAR() macro -static PyObject* -test_py_clear(PyObject *self, PyObject *Py_UNUSED(ignored)) +static PyObject * +get_basic_static_type(PyObject *self, PyObject *args) { - // simple case with a variable - PyObject *obj = PyList_New(0); - if (obj == NULL) { + PyObject *base = NULL; + if (!PyArg_ParseTuple(args, "|O", &base)) { return NULL; } - Py_CLEAR(obj); - assert(obj == NULL); + assert(base == NULL || PyType_Check(base)); - // gh-98724: complex case, Py_CLEAR() argument has a side effect - PyObject* array[1]; - array[0] = PyList_New(0); - if (array[0] == NULL) { + if(num_basic_static_types_used >= NUM_BASIC_STATIC_TYPES) { + PyErr_SetString(PyExc_RuntimeError, "no more available basic static types"); return NULL; } + PyTypeObject *cls = &BasicStaticTypes[num_basic_static_types_used++]; - PyObject **p = array; - Py_CLEAR(*p++); - assert(array[0] == NULL); - assert(p == array + 1); - - Py_RETURN_NONE; -} - - -// Test Py_SETREF() and Py_XSETREF() macros, similar to test_py_clear() -static PyObject* -test_py_setref(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - // Py_SETREF() simple case with a variable - PyObject *obj = PyList_New(0); - if (obj == NULL) { - return NULL; + if (base != NULL) { + cls->tp_bases = PyTuple_Pack(1, base); + if (cls->tp_bases == NULL) { + return NULL; + } + cls->tp_base = (PyTypeObject *)Py_NewRef(base); } - Py_SETREF(obj, NULL); - assert(obj == NULL); - - // Py_XSETREF() simple case with a variable - PyObject *obj2 = PyList_New(0); - if (obj2 == NULL) { + if (PyType_Ready(cls) < 0) { + Py_DECREF(cls->tp_bases); + Py_DECREF(cls->tp_base); return NULL; } - Py_XSETREF(obj2, NULL); - assert(obj2 == NULL); - // test Py_XSETREF() when the argument is NULL - Py_XSETREF(obj2, NULL); - 
assert(obj2 == NULL); - - // gh-98724: complex case, Py_SETREF() argument has a side effect - PyObject* array[1]; - array[0] = PyList_New(0); - if (array[0] == NULL) { - return NULL; - } - - PyObject **p = array; - Py_SETREF(*p++, NULL); - assert(array[0] == NULL); - assert(p == array + 1); - - // gh-98724: complex case, Py_XSETREF() argument has a side effect - PyObject* array2[1]; - array2[0] = PyList_New(0); - if (array2[0] == NULL) { - return NULL; - } - - PyObject **p2 = array2; - Py_XSETREF(*p2++, NULL); - assert(array2[0] == NULL); - assert(p2 == array2 + 1); - - // test Py_XSETREF() when the argument is NULL - p2 = array2; - Py_XSETREF(*p2++, NULL); - assert(array2[0] == NULL); - assert(p2 == array2 + 1); - - Py_RETURN_NONE; -} - - -#define TEST_REFCOUNT() \ - do { \ - PyObject *obj = PyList_New(0); \ - if (obj == NULL) { \ - return NULL; \ - } \ - assert(Py_REFCNT(obj) == 1); \ - \ - /* test Py_NewRef() */ \ - PyObject *ref = Py_NewRef(obj); \ - assert(ref == obj); \ - assert(Py_REFCNT(obj) == 2); \ - Py_DECREF(ref); \ - \ - /* test Py_XNewRef() */ \ - PyObject *xref = Py_XNewRef(obj); \ - assert(xref == obj); \ - assert(Py_REFCNT(obj) == 2); \ - Py_DECREF(xref); \ - \ - assert(Py_XNewRef(NULL) == NULL); \ - \ - Py_DECREF(obj); \ - Py_RETURN_NONE; \ - } while (0) - - -// Test Py_NewRef() and Py_XNewRef() macros -static PyObject* -test_refcount_macros(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - TEST_REFCOUNT(); -} - -#undef Py_NewRef -#undef Py_XNewRef - -// Test Py_NewRef() and Py_XNewRef() functions, after undefining macros. -static PyObject* -test_refcount_funcs(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - TEST_REFCOUNT(); -} - - -// Test Py_Is() function -#define TEST_PY_IS() \ - do { \ - PyObject *o_none = Py_None; \ - PyObject *o_true = Py_True; \ - PyObject *o_false = Py_False; \ - PyObject *obj = PyList_New(0); \ - if (obj == NULL) { \ - return NULL; \ - } \ - \ - /* test Py_Is() */ \ - assert(Py_Is(obj, obj)); \ - assert(!Py_Is(obj, o_none)); \ - \ - /* test Py_None */ \ - assert(Py_Is(o_none, o_none)); \ - assert(!Py_Is(obj, o_none)); \ - \ - /* test Py_True */ \ - assert(Py_Is(o_true, o_true)); \ - assert(!Py_Is(o_false, o_true)); \ - assert(!Py_Is(obj, o_true)); \ - \ - /* test Py_False */ \ - assert(Py_Is(o_false, o_false)); \ - assert(!Py_Is(o_true, o_false)); \ - assert(!Py_Is(obj, o_false)); \ - \ - Py_DECREF(obj); \ - Py_RETURN_NONE; \ - } while (0) - -// Test Py_Is() macro -static PyObject* -test_py_is_macros(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - TEST_PY_IS(); -} - -#undef Py_Is - -// Test Py_Is() function, after undefining its macro. 
-static PyObject* -test_py_is_funcs(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - TEST_PY_IS(); -} - - -// type->tp_version_tag -static PyObject * -type_get_version(PyObject *self, PyObject *type) -{ - if (!PyType_Check(type)) { - PyErr_SetString(PyExc_TypeError, "argument must be a type"); - return NULL; - } - PyObject *res = PyLong_FromUnsignedLong( - ((PyTypeObject *)type)->tp_version_tag); - if (res == NULL) { - assert(PyErr_Occurred()); - return NULL; - } - return res; -} - -static PyObject * -type_modified(PyObject *self, PyObject *type) -{ - if (!PyType_Check(type)) { - PyErr_SetString(PyExc_TypeError, "argument must be a type"); - return NULL; - } - PyType_Modified((PyTypeObject *)type); - Py_RETURN_NONE; -} - - -static PyObject * -type_assign_version(PyObject *self, PyObject *type) -{ - if (!PyType_Check(type)) { - PyErr_SetString(PyExc_TypeError, "argument must be a type"); - return NULL; - } - int res = PyUnstable_Type_AssignVersionTag((PyTypeObject *)type); - return PyLong_FromLong(res); -} - - -static PyObject * -type_get_tp_bases(PyObject *self, PyObject *type) -{ - PyObject *bases = ((PyTypeObject *)type)->tp_bases; - if (bases == NULL) { - Py_RETURN_NONE; - } - return Py_NewRef(bases); -} - -static PyObject * -type_get_tp_mro(PyObject *self, PyObject *type) -{ - PyObject *mro = ((PyTypeObject *)type)->tp_mro; - if (mro == NULL) { - Py_RETURN_NONE; - } - return Py_NewRef(mro); -} - - -/* We only use 2 in test_capi/test_misc.py. */ -#define NUM_BASIC_STATIC_TYPES 2 -static PyTypeObject BasicStaticTypes[NUM_BASIC_STATIC_TYPES] = { -#define INIT_BASIC_STATIC_TYPE \ - { \ - PyVarObject_HEAD_INIT(NULL, 0) \ - .tp_name = "BasicStaticType", \ - .tp_basicsize = sizeof(PyObject), \ - } - INIT_BASIC_STATIC_TYPE, - INIT_BASIC_STATIC_TYPE, -#undef INIT_BASIC_STATIC_TYPE -}; -static int num_basic_static_types_used = 0; - -static PyObject * -get_basic_static_type(PyObject *self, PyObject *args) -{ - PyObject *base = NULL; - if (!PyArg_ParseTuple(args, "|O", &base)) { - return NULL; - } - assert(base == NULL || PyType_Check(base)); - - if(num_basic_static_types_used >= NUM_BASIC_STATIC_TYPES) { - PyErr_SetString(PyExc_RuntimeError, "no more available basic static types"); - return NULL; - } - PyTypeObject *cls = &BasicStaticTypes[num_basic_static_types_used++]; - - if (base != NULL) { - cls->tp_bases = PyTuple_Pack(1, base); - if (cls->tp_bases == NULL) { - return NULL; - } - cls->tp_base = (PyTypeObject *)Py_NewRef(base); - } - if (PyType_Ready(cls) < 0) { - Py_DECREF(cls->tp_bases); - Py_DECREF(cls->tp_base); - return NULL; - } - return (PyObject *)cls; -} + return (PyObject *)cls; +} // Test PyThreadState C API @@ -2533,109 +1875,6 @@ test_tstate_capi(PyObject *self, PyObject *Py_UNUSED(args)) Py_RETURN_NONE; } -static PyObject * -frame_getlocals(PyObject *self, PyObject *frame) -{ - if (!PyFrame_Check(frame)) { - PyErr_SetString(PyExc_TypeError, "argument must be a frame"); - return NULL; - } - return PyFrame_GetLocals((PyFrameObject *)frame); -} - -static PyObject * -frame_getglobals(PyObject *self, PyObject *frame) -{ - if (!PyFrame_Check(frame)) { - PyErr_SetString(PyExc_TypeError, "argument must be a frame"); - return NULL; - } - return PyFrame_GetGlobals((PyFrameObject *)frame); -} - -static PyObject * -frame_getgenerator(PyObject *self, PyObject *frame) -{ - if (!PyFrame_Check(frame)) { - PyErr_SetString(PyExc_TypeError, "argument must be a frame"); - return NULL; - } - return PyFrame_GetGenerator((PyFrameObject *)frame); -} - -static PyObject * -frame_getbuiltins(PyObject 
*self, PyObject *frame) -{ - if (!PyFrame_Check(frame)) { - PyErr_SetString(PyExc_TypeError, "argument must be a frame"); - return NULL; - } - return PyFrame_GetBuiltins((PyFrameObject *)frame); -} - -static PyObject * -frame_getlasti(PyObject *self, PyObject *frame) -{ - if (!PyFrame_Check(frame)) { - PyErr_SetString(PyExc_TypeError, "argument must be a frame"); - return NULL; - } - int lasti = PyFrame_GetLasti((PyFrameObject *)frame); - if (lasti < 0) { - assert(lasti == -1); - Py_RETURN_NONE; - } - return PyLong_FromLong(lasti); -} - -static PyObject * -frame_new(PyObject *self, PyObject *args) -{ - PyObject *code, *globals, *locals; - if (!PyArg_ParseTuple(args, "OOO", &code, &globals, &locals)) { - return NULL; - } - if (!PyCode_Check(code)) { - PyErr_SetString(PyExc_TypeError, "argument must be a code object"); - return NULL; - } - PyThreadState *tstate = PyThreadState_Get(); - - return (PyObject *)PyFrame_New(tstate, (PyCodeObject *)code, globals, locals); -} - -static PyObject * -test_frame_getvar(PyObject *self, PyObject *args) -{ - PyObject *frame, *name; - if (!PyArg_ParseTuple(args, "OO", &frame, &name)) { - return NULL; - } - if (!PyFrame_Check(frame)) { - PyErr_SetString(PyExc_TypeError, "argument must be a frame"); - return NULL; - } - - return PyFrame_GetVar((PyFrameObject *)frame, name); -} - -static PyObject * -test_frame_getvarstring(PyObject *self, PyObject *args) -{ - PyObject *frame; - const char *name; - if (!PyArg_ParseTuple(args, "Oy", &frame, &name)) { - return NULL; - } - if (!PyFrame_Check(frame)) { - PyErr_SetString(PyExc_TypeError, "argument must be a frame"); - return NULL; - } - - return PyFrame_GetVarString((PyFrameObject *)frame, name); -} - - static PyObject * gen_get_code(PyObject *self, PyObject *gen) { @@ -2903,14 +2142,6 @@ settrace_to_error(PyObject *self, PyObject *list) Py_RETURN_NONE; } -static PyObject * -clear_managed_dict(PyObject *self, PyObject *obj) -{ - PyObject_ClearManagedDict(obj); - Py_RETURN_NONE; -} - - static PyObject * test_macros(PyObject *self, PyObject *Py_UNUSED(args)) { @@ -2947,165 +2178,6 @@ test_macros(PyObject *self, PyObject *Py_UNUSED(args)) Py_RETURN_NONE; } -static PyObject * -function_get_code(PyObject *self, PyObject *func) -{ - PyObject *code = PyFunction_GetCode(func); - if (code != NULL) { - return Py_NewRef(code); - } else { - return NULL; - } -} - -static PyObject * -function_get_globals(PyObject *self, PyObject *func) -{ - PyObject *globals = PyFunction_GetGlobals(func); - if (globals != NULL) { - return Py_NewRef(globals); - } else { - return NULL; - } -} - -static PyObject * -function_get_module(PyObject *self, PyObject *func) -{ - PyObject *module = PyFunction_GetModule(func); - if (module != NULL) { - return Py_NewRef(module); - } else { - return NULL; - } -} - -static PyObject * -function_get_defaults(PyObject *self, PyObject *func) -{ - PyObject *defaults = PyFunction_GetDefaults(func); - if (defaults != NULL) { - return Py_NewRef(defaults); - } else if (PyErr_Occurred()) { - return NULL; - } else { - Py_RETURN_NONE; // This can happen when `defaults` are set to `None` - } -} - -static PyObject * -function_set_defaults(PyObject *self, PyObject *args) -{ - PyObject *func = NULL, *defaults = NULL; - if (!PyArg_ParseTuple(args, "OO", &func, &defaults)) { - return NULL; - } - int result = PyFunction_SetDefaults(func, defaults); - if (result == -1) - return NULL; - Py_RETURN_NONE; -} - -static PyObject * -function_get_kw_defaults(PyObject *self, PyObject *func) -{ - PyObject *defaults = 
PyFunction_GetKwDefaults(func); - if (defaults != NULL) { - return Py_NewRef(defaults); - } else if (PyErr_Occurred()) { - return NULL; - } else { - Py_RETURN_NONE; // This can happen when `kwdefaults` are set to `None` - } -} - -static PyObject * -function_set_kw_defaults(PyObject *self, PyObject *args) -{ - PyObject *func = NULL, *defaults = NULL; - if (!PyArg_ParseTuple(args, "OO", &func, &defaults)) { - return NULL; - } - int result = PyFunction_SetKwDefaults(func, defaults); - if (result == -1) - return NULL; - Py_RETURN_NONE; -} - -static PyObject * -function_get_closure(PyObject *self, PyObject *func) -{ - PyObject *closure = PyFunction_GetClosure(func); - if (closure != NULL) { - return Py_NewRef(closure); - } else if (PyErr_Occurred()) { - return NULL; - } else { - Py_RETURN_NONE; // This can happen when `closure` is set to `None` - } -} - -static PyObject * -function_set_closure(PyObject *self, PyObject *args) -{ - PyObject *func = NULL, *closure = NULL; - if (!PyArg_ParseTuple(args, "OO", &func, &closure)) { - return NULL; - } - int result = PyFunction_SetClosure(func, closure); - if (result == -1) { - return NULL; - } - Py_RETURN_NONE; -} - -static PyObject * -check_pyimport_addmodule(PyObject *self, PyObject *args) -{ - const char *name; - if (!PyArg_ParseTuple(args, "s", &name)) { - return NULL; - } - - // test PyImport_AddModuleRef() - PyObject *module = PyImport_AddModuleRef(name); - if (module == NULL) { - return NULL; - } - assert(PyModule_Check(module)); - // module is a strong reference - - // test PyImport_AddModule() - PyObject *module2 = PyImport_AddModule(name); - if (module2 == NULL) { - goto error; - } - assert(PyModule_Check(module2)); - assert(module2 == module); - // module2 is a borrowed ref - - // test PyImport_AddModuleObject() - PyObject *name_obj = PyUnicode_FromString(name); - if (name_obj == NULL) { - goto error; - } - PyObject *module3 = PyImport_AddModuleObject(name_obj); - Py_DECREF(name_obj); - if (module3 == NULL) { - goto error; - } - assert(PyModule_Check(module3)); - assert(module3 == module); - // module3 is a borrowed ref - - return module; - -error: - Py_DECREF(module); - return NULL; -} - - static PyObject * test_weakref_capi(PyObject *Py_UNUSED(module), PyObject *Py_UNUSED(args)) { @@ -3354,19 +2426,6 @@ finalize_thread_hang(PyObject *self, PyObject *callback) } -static PyObject * -type_freeze(PyObject *module, PyObject *args) -{ - PyTypeObject *type; - if (!PyArg_ParseTuple(args, "O!", &PyType_Type, &type)) { - return NULL; - } - if (PyType_Freeze(type) < 0) { - return NULL; - } - Py_RETURN_NONE; -} - struct atexit_data { int called; PyThreadState *tstate; @@ -3415,26 +2474,39 @@ test_atexit(PyObject *self, PyObject *Py_UNUSED(args)) Py_RETURN_NONE; } +static PyObject* +code_offset_to_line(PyObject* self, PyObject* const* args, Py_ssize_t nargsf) +{ + Py_ssize_t nargs = _PyVectorcall_NARGS(nargsf); + if (nargs != 2) { + PyErr_SetString(PyExc_TypeError, "code_offset_to_line takes 2 arguments"); + return NULL; + } + int offset; + if (PyLong_AsInt32(args[1], &offset) < 0) { + return NULL; + } + PyCodeObject *code = (PyCodeObject *)args[0]; + if (!PyCode_Check(code)) { + PyErr_SetString(PyExc_TypeError, "first arg must be a code object"); + return NULL; + } + return PyLong_FromInt32(PyCode_Addr2Line(code, offset)); +} + + static PyMethodDef TestMethods[] = { {"set_errno", set_errno, METH_VARARGS}, {"test_config", test_config, METH_NOARGS}, {"test_sizeof_c_types", test_sizeof_c_types, METH_NOARGS}, - {"test_list_api", test_list_api, 
METH_NOARGS}, - {"test_dict_iteration", test_dict_iteration, METH_NOARGS}, {"test_lazy_hash_inheritance", test_lazy_hash_inheritance,METH_NOARGS}, - {"test_xincref_doesnt_leak",test_xincref_doesnt_leak, METH_NOARGS}, - {"test_incref_doesnt_leak", test_incref_doesnt_leak, METH_NOARGS}, - {"test_xdecref_doesnt_leak",test_xdecref_doesnt_leak, METH_NOARGS}, - {"test_decref_doesnt_leak", test_decref_doesnt_leak, METH_NOARGS}, {"test_structseq_newtype_doesnt_leak", test_structseq_newtype_doesnt_leak, METH_NOARGS}, {"test_structseq_newtype_null_descr_doc", test_structseq_newtype_null_descr_doc, METH_NOARGS}, - {"test_incref_decref_API", test_incref_decref_API, METH_NOARGS}, {"pyobject_repr_from_null", pyobject_repr_from_null, METH_NOARGS}, {"pyobject_str_from_null", pyobject_str_from_null, METH_NOARGS}, {"pyobject_bytes_from_null", pyobject_bytes_from_null, METH_NOARGS}, - {"test_string_to_double", test_string_to_double, METH_NOARGS}, {"test_capsule", (PyCFunction)test_capsule, METH_NOARGS}, {"test_from_contiguous", (PyCFunction)test_from_contiguous, METH_NOARGS}, #if (defined(__linux__) || defined(__FreeBSD__)) && defined(__GNUC__) @@ -3445,13 +2517,6 @@ static PyMethodDef TestMethods[] = { {"py_buildvalue", py_buildvalue, METH_VARARGS}, {"py_buildvalue_ints", py_buildvalue_ints, METH_VARARGS}, {"test_buildvalue_N", test_buildvalue_N, METH_NOARGS}, - {"test_get_statictype_slots", test_get_statictype_slots, METH_NOARGS}, - {"get_heaptype_for_name", get_heaptype_for_name, METH_NOARGS}, - {"get_type_name", get_type_name, METH_O}, - {"get_type_qualname", get_type_qualname, METH_O}, - {"get_type_fullyqualname", get_type_fullyqualname, METH_O}, - {"get_type_module_name", get_type_module_name, METH_O}, - {"test_get_type_dict", test_get_type_dict, METH_NOARGS}, {"test_reftracer", test_reftracer, METH_NOARGS}, {"_test_thread_state", test_thread_state, METH_VARARGS}, {"gilstate_ensure_release", gilstate_ensure_release, METH_NOARGS}, @@ -3500,10 +2565,6 @@ static PyMethodDef TestMethods[] = { #endif {"test_pythread_tss_key_state", test_pythread_tss_key_state, METH_VARARGS}, {"bad_get", bad_get, METH_VARARGS}, -#ifdef Py_REF_DEBUG - {"negative_refcount", negative_refcount, METH_NOARGS}, - {"decref_freed_object", decref_freed_object, METH_NOARGS}, -#endif {"meth_varargs", meth_varargs, METH_VARARGS}, {"meth_varargs_keywords", _PyCFunction_CAST(meth_varargs_keywords), METH_VARARGS|METH_KEYWORDS}, {"meth_o", meth_o, METH_O}, @@ -3512,51 +2573,20 @@ static PyMethodDef TestMethods[] = { {"meth_fastcall_keywords", _PyCFunction_CAST(meth_fastcall_keywords), METH_FASTCALL|METH_KEYWORDS}, {"pycfunction_call", test_pycfunction_call, METH_VARARGS}, {"pynumber_tobase", pynumber_tobase, METH_VARARGS}, - {"test_set_type_size", test_set_type_size, METH_NOARGS}, - {"test_py_clear", test_py_clear, METH_NOARGS}, - {"test_py_setref", test_py_setref, METH_NOARGS}, - {"test_refcount_macros", test_refcount_macros, METH_NOARGS}, - {"test_refcount_funcs", test_refcount_funcs, METH_NOARGS}, - {"test_py_is_macros", test_py_is_macros, METH_NOARGS}, - {"test_py_is_funcs", test_py_is_funcs, METH_NOARGS}, - {"type_get_version", type_get_version, METH_O, PyDoc_STR("type->tp_version_tag")}, - {"type_modified", type_modified, METH_O, PyDoc_STR("PyType_Modified")}, - {"type_assign_version", type_assign_version, METH_O, PyDoc_STR("PyUnstable_Type_AssignVersionTag")}, - {"type_get_tp_bases", type_get_tp_bases, METH_O}, - {"type_get_tp_mro", type_get_tp_mro, METH_O}, {"get_basic_static_type", get_basic_static_type, METH_VARARGS, NULL}, 
{"test_tstate_capi", test_tstate_capi, METH_NOARGS, NULL}, - {"frame_getlocals", frame_getlocals, METH_O, NULL}, - {"frame_getglobals", frame_getglobals, METH_O, NULL}, - {"frame_getgenerator", frame_getgenerator, METH_O, NULL}, - {"frame_getbuiltins", frame_getbuiltins, METH_O, NULL}, - {"frame_getlasti", frame_getlasti, METH_O, NULL}, - {"frame_new", frame_new, METH_VARARGS, NULL}, - {"frame_getvar", test_frame_getvar, METH_VARARGS, NULL}, - {"frame_getvarstring", test_frame_getvarstring, METH_VARARGS, NULL}, {"gen_get_code", gen_get_code, METH_O, NULL}, {"get_feature_macros", get_feature_macros, METH_NOARGS, NULL}, {"test_code_api", test_code_api, METH_NOARGS, NULL}, {"settrace_to_error", settrace_to_error, METH_O, NULL}, {"settrace_to_record", settrace_to_record, METH_O, NULL}, {"test_macros", test_macros, METH_NOARGS, NULL}, - {"clear_managed_dict", clear_managed_dict, METH_O, NULL}, - {"function_get_code", function_get_code, METH_O, NULL}, - {"function_get_globals", function_get_globals, METH_O, NULL}, - {"function_get_module", function_get_module, METH_O, NULL}, - {"function_get_defaults", function_get_defaults, METH_O, NULL}, - {"function_set_defaults", function_set_defaults, METH_VARARGS, NULL}, - {"function_get_kw_defaults", function_get_kw_defaults, METH_O, NULL}, - {"function_set_kw_defaults", function_set_kw_defaults, METH_VARARGS, NULL}, - {"function_get_closure", function_get_closure, METH_O, NULL}, - {"function_set_closure", function_set_closure, METH_VARARGS, NULL}, - {"check_pyimport_addmodule", check_pyimport_addmodule, METH_VARARGS}, {"test_weakref_capi", test_weakref_capi, METH_NOARGS}, {"function_set_warning", function_set_warning, METH_NOARGS}, {"test_critical_sections", test_critical_sections, METH_NOARGS}, {"finalize_thread_hang", finalize_thread_hang, METH_O, NULL}, - {"type_freeze", type_freeze, METH_VARARGS}, {"test_atexit", test_atexit, METH_NOARGS}, + {"code_offset_to_line", _PyCFunction_CAST(code_offset_to_line), METH_FASTCALL}, {NULL, NULL} /* sentinel */ }; @@ -4029,6 +3059,61 @@ static PyTypeObject ContainerNoGC_type = { .tp_new = ContainerNoGC_new, }; +/* Manually allocated heap type */ + +typedef struct { + PyObject_HEAD + PyObject *dict; +} ManualHeapType; + +static int +ManualHeapType_traverse(PyObject *self, visitproc visit, void *arg) +{ + ManualHeapType *mht = (ManualHeapType *)self; + Py_VISIT(mht->dict); + return 0; +} + +static void +ManualHeapType_dealloc(PyObject *self) +{ + ManualHeapType *mht = (ManualHeapType *)self; + PyObject_GC_UnTrack(self); + Py_XDECREF(mht->dict); + PyTypeObject *type = Py_TYPE(self); + Py_TYPE(self)->tp_free(self); + Py_DECREF(type); +} + +static PyObject * +create_manual_heap_type(void) +{ + // gh-128923: Ensure that a heap type allocated through PyType_Type.tp_alloc + // with minimal initialization works correctly. 
+ PyHeapTypeObject *heap_type = (PyHeapTypeObject *)PyType_Type.tp_alloc(&PyType_Type, 0); + if (heap_type == NULL) { + return NULL; + } + PyTypeObject* type = &heap_type->ht_type; + type->tp_basicsize = sizeof(ManualHeapType); + type->tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HEAPTYPE | Py_TPFLAGS_HAVE_GC; + type->tp_new = PyType_GenericNew; + type->tp_name = "ManualHeapType"; + type->tp_dictoffset = offsetof(ManualHeapType, dict); + type->tp_traverse = ManualHeapType_traverse; + type->tp_dealloc = ManualHeapType_dealloc; + heap_type->ht_name = PyUnicode_FromString(type->tp_name); + if (!heap_type->ht_name) { + Py_DECREF(type); + return NULL; + } + heap_type->ht_qualname = Py_NewRef(heap_type->ht_name); + if (PyType_Ready(type) < 0) { + Py_DECREF(type); + return NULL; + } + return (PyObject *)type; +} static struct PyModuleDef _testcapimodule = { PyModuleDef_HEAD_INIT, @@ -4163,6 +3248,15 @@ PyInit__testcapi(void) (PyObject *) &ContainerNoGC_type) < 0) return NULL; + PyObject *manual_heap_type = create_manual_heap_type(); + if (manual_heap_type == NULL) { + return NULL; + } + if (PyModule_Add(m, "ManualHeapType", manual_heap_type) < 0) { + return NULL; + } + + /* Include tests from the _testcapi/ directory */ if (_PyTestCapi_Init_Vectorcall(m) < 0) { return NULL; @@ -4263,6 +3357,18 @@ PyInit__testcapi(void) if (_PyTestCapi_Init_Config(m) < 0) { return NULL; } + if (_PyTestCapi_Init_Import(m) < 0) { + return NULL; + } + if (_PyTestCapi_Init_Frame(m) < 0) { + return NULL; + } + if (_PyTestCapi_Init_Type(m) < 0) { + return NULL; + } + if (_PyTestCapi_Init_Function(m) < 0) { + return NULL; + } PyState_AddModule(m, &_testcapimodule); return m; diff --git a/Modules/_testexternalinspection.c b/Modules/_testexternalinspection.c index 0807d1e47b6736..22074c81b7405f 100644 --- a/Modules/_testexternalinspection.c +++ b/Modules/_testexternalinspection.c @@ -59,10 +59,30 @@ # define HAVE_PROCESS_VM_READV 0 #endif +struct _Py_AsyncioModuleDebugOffsets { + struct _asyncio_task_object { + uint64_t size; + uint64_t task_name; + uint64_t task_awaited_by; + uint64_t task_is_task; + uint64_t task_awaited_by_is_set; + uint64_t task_coro; + } asyncio_task_object; + struct _asyncio_thread_state { + uint64_t size; + uint64_t asyncio_running_loop; + uint64_t asyncio_running_task; + } asyncio_thread_state; +}; + #if defined(__APPLE__) && TARGET_OS_OSX -static void* -analyze_macho64(mach_port_t proc_ref, void* base, void* map) -{ +static uintptr_t +return_section_address( + const char* section, + mach_port_t proc_ref, + uintptr_t base, + void* map +) { struct mach_header_64* hdr = (struct mach_header_64*)map; int ncmds = hdr->ncmds; @@ -72,35 +92,40 @@ analyze_macho64(mach_port_t proc_ref, void* base, void* map) mach_vm_size_t size = 0; mach_msg_type_number_t count = sizeof(vm_region_basic_info_data_64_t); mach_vm_address_t address = (mach_vm_address_t)base; - vm_region_basic_info_data_64_t region_info; + vm_region_basic_info_data_64_t r_info; mach_port_t object_name; + uintptr_t vmaddr = 0; for (int i = 0; cmd_cnt < 2 && i < ncmds; i++) { + if (cmd->cmd == LC_SEGMENT_64 && strcmp(cmd->segname, "__TEXT") == 0) { + vmaddr = cmd->vmaddr; + } if (cmd->cmd == LC_SEGMENT_64 && strcmp(cmd->segname, "__DATA") == 0) { while (cmd->filesize != size) { address += size; - if (mach_vm_region( - proc_ref, - &address, - &size, - VM_REGION_BASIC_INFO_64, - (vm_region_info_t)®ion_info, // cppcheck-suppress [uninitvar] - &count, - &object_name) - != KERN_SUCCESS) - { - PyErr_SetString(PyExc_RuntimeError, "Cannot get any more VM 
maps.\n"); - return NULL; + kern_return_t ret = mach_vm_region( + proc_ref, + &address, + &size, + VM_REGION_BASIC_INFO_64, + (vm_region_info_t)&r_info, // cppcheck-suppress [uninitvar] + &count, + &object_name + ); + if (ret != KERN_SUCCESS) { + PyErr_SetString( + PyExc_RuntimeError, "Cannot get any more VM maps.\n"); + return 0; } } - base = (void*)address - cmd->vmaddr; int nsects = cmd->nsects; - struct section_64* sec = - (struct section_64*)((void*)cmd + sizeof(struct segment_command_64)); + struct section_64* sec = (struct section_64*)( + (void*)cmd + sizeof(struct segment_command_64) + ); for (int j = 0; j < nsects; j++) { - if (strcmp(sec[j].sectname, "PyRuntime") == 0) { - return base + sec[j].addr; + if (strcmp(sec[j].sectname, section) == 0) { + return base + sec[j].addr - vmaddr; } } cmd_cnt++; @@ -108,33 +133,39 @@ analyze_macho64(mach_port_t proc_ref, void* base, void* map) cmd = (struct segment_command_64*)((void*)cmd + cmd->cmdsize); } - return NULL; + return 0; } -static void* -analyze_macho(char* path, void* base, mach_vm_size_t size, mach_port_t proc_ref) -{ +static uintptr_t +search_section_in_file( + const char* secname, + char* path, + uintptr_t base, + mach_vm_size_t size, + mach_port_t proc_ref +) { int fd = open(path, O_RDONLY); if (fd == -1) { PyErr_Format(PyExc_RuntimeError, "Cannot open binary %s\n", path); - return NULL; + return 0; } struct stat fs; if (fstat(fd, &fs) == -1) { - PyErr_Format(PyExc_RuntimeError, "Cannot get size of binary %s\n", path); + PyErr_Format( + PyExc_RuntimeError, "Cannot get size of binary %s\n", path); close(fd); - return NULL; + return 0; } void* map = mmap(0, fs.st_size, PROT_READ, MAP_SHARED, fd, 0); if (map == MAP_FAILED) { PyErr_Format(PyExc_RuntimeError, "Cannot map binary %s\n", path); close(fd); - return NULL; + return 0; } - void* result = NULL; + uintptr_t result = 0; struct mach_header_64* hdr = (struct mach_header_64*)map; switch (hdr->magic) { @@ -142,11 +173,13 @@ analyze_macho(char* path, void* base, mach_vm_size_t size, mach_port_t proc_ref) case MH_CIGAM: case FAT_MAGIC: case FAT_CIGAM: - PyErr_SetString(PyExc_RuntimeError, "32-bit Mach-O binaries are not supported"); + PyErr_SetString( + PyExc_RuntimeError, + "32-bit Mach-O binaries are not supported"); break; case MH_MAGIC_64: case MH_CIGAM_64: - result = analyze_macho64(proc_ref, base, map); + result = return_section_address(secname, proc_ref, base, map); break; default: PyErr_SetString(PyExc_RuntimeError, "Unknown Mach-O magic"); @@ -174,9 +207,8 @@ pid_to_task(pid_t pid) return task; } -static void* -get_py_runtime_macos(pid_t pid) -{ +static uintptr_t +search_map_for_section(pid_t pid, const char* secname, const char* substr) { mach_vm_address_t address = 0; mach_vm_size_t size = 0; mach_msg_type_number_t count = sizeof(vm_region_basic_info_data_64_t); @@ -186,12 +218,11 @@ get_py_runtime_macos(pid_t pid) mach_port_t proc_ref = pid_to_task(pid); if (proc_ref == 0) { PyErr_SetString(PyExc_PermissionError, "Cannot get task for PID"); - return NULL; + return 0; } int match_found = 0; char map_filename[MAXPATHLEN + 1]; - void* result_address = NULL; while (mach_vm_region( proc_ref, &address, @@ -199,10 +230,16 @@ get_py_runtime_macos(pid_t pid) VM_REGION_BASIC_INFO_64, (vm_region_info_t)®ion_info, &count, - &object_name) - == KERN_SUCCESS) + &object_name) == KERN_SUCCESS) { - int path_len = proc_regionfilename(pid, address, map_filename, MAXPATHLEN); + if ((region_info.protection & VM_PROT_READ) == 0 + || (region_info.protection & VM_PROT_EXECUTE) == 0) { + 
address += size; + continue; + } + + int path_len = proc_regionfilename( + pid, address, map_filename, MAXPATHLEN); if (path_len == 0) { address += size; continue; @@ -215,26 +252,20 @@ get_py_runtime_macos(pid_t pid) filename = map_filename; // No path, use the whole string } - // Check if the filename starts with "python" or "libpython" - if (!match_found && strncmp(filename, "python", 6) == 0) { - match_found = 1; - result_address = analyze_macho(map_filename, (void*)address, size, proc_ref); - } - if (strncmp(filename, "libpython", 9) == 0) { + if (!match_found && strncmp(filename, substr, strlen(substr)) == 0) { match_found = 1; - result_address = analyze_macho(map_filename, (void*)address, size, proc_ref); - break; + return search_section_in_file( + secname, map_filename, address, size, proc_ref); } address += size; } - return result_address; + return 0; } -#endif -#ifdef __linux__ -void* -find_python_map_start_address(pid_t pid, char* result_filename) +#elif defined(__linux__) +static uintptr_t +find_map_start_address(pid_t pid, char* result_filename, const char* map) { char maps_file_path[64]; sprintf(maps_file_path, "/proc/%d/maps", pid); @@ -242,17 +273,20 @@ find_python_map_start_address(pid_t pid, char* result_filename) FILE* maps_file = fopen(maps_file_path, "r"); if (maps_file == NULL) { PyErr_SetFromErrno(PyExc_OSError); - return NULL; + return 0; } int match_found = 0; char line[256]; char map_filename[PATH_MAX]; - void* result_address = 0; + uintptr_t result_address = 0; while (fgets(line, sizeof(line), maps_file) != NULL) { unsigned long start_address = 0; - sscanf(line, "%lx-%*x %*s %*s %*s %*s %s", &start_address, map_filename); + sscanf( + line, "%lx-%*x %*s %*s %*s %*s %s", + &start_address, map_filename + ); char* filename = strrchr(map_filename, '/'); if (filename != NULL) { filename++; // Move past the '/' @@ -260,15 +294,9 @@ find_python_map_start_address(pid_t pid, char* result_filename) filename = map_filename; // No path, use the whole string } - // Check if the filename starts with "python" or "libpython" - if (!match_found && strncmp(filename, "python", 6) == 0) { - match_found = 1; - result_address = (void*)start_address; - strcpy(result_filename, map_filename); - } - if (strncmp(filename, "libpython", 9) == 0) { + if (!match_found && strncmp(filename, map, strlen(map)) == 0) { match_found = 1; - result_address = (void*)start_address; + result_address = start_address; strcpy(result_filename, map_filename); break; } @@ -283,18 +311,17 @@ find_python_map_start_address(pid_t pid, char* result_filename) return result_address; } -void* -get_py_runtime_linux(pid_t pid) +static uintptr_t +search_map_for_section(pid_t pid, const char* secname, const char* map) { char elf_file[256]; - void* start_address = (void*)find_python_map_start_address(pid, elf_file); + uintptr_t start_address = find_map_start_address(pid, elf_file, map); if (start_address == 0) { - PyErr_SetString(PyExc_RuntimeError, "No memory map associated with python or libpython found"); - return NULL; + return 0; } - void* result = NULL; + uintptr_t result = 0; void* file_memory = NULL; int fd = open(elf_file, O_RDONLY); @@ -317,20 +344,29 @@ get_py_runtime_linux(pid_t pid) Elf_Ehdr* elf_header = (Elf_Ehdr*)file_memory; - Elf_Shdr* section_header_table = (Elf_Shdr*)(file_memory + elf_header->e_shoff); + Elf_Shdr* section_header_table = + (Elf_Shdr*)(file_memory + elf_header->e_shoff); Elf_Shdr* shstrtab_section = §ion_header_table[elf_header->e_shstrndx]; char* shstrtab = (char*)(file_memory + 
shstrtab_section->sh_offset); - Elf_Shdr* py_runtime_section = NULL; + Elf_Shdr* section = NULL; for (int i = 0; i < elf_header->e_shnum; i++) { - if (strcmp(".PyRuntime", shstrtab + section_header_table[i].sh_name) == 0) { - py_runtime_section = §ion_header_table[i]; + const char* this_sec_name = ( + shstrtab + + section_header_table[i].sh_name + + 1 // "+1" accounts for the leading "." + ); + + if (strcmp(secname, this_sec_name) == 0) { + section = §ion_header_table[i]; break; } } - Elf_Phdr* program_header_table = (Elf_Phdr*)(file_memory + elf_header->e_phoff); + Elf_Phdr* program_header_table = + (Elf_Phdr*)(file_memory + elf_header->e_phoff); + // Find the first PT_LOAD segment Elf_Phdr* first_load_segment = NULL; for (int i = 0; i < elf_header->e_phnum; i++) { @@ -340,10 +376,16 @@ get_py_runtime_linux(pid_t pid) } } - if (py_runtime_section != NULL && first_load_segment != NULL) { - uintptr_t elf_load_addr = first_load_segment->p_vaddr - - (first_load_segment->p_vaddr % first_load_segment->p_align); - result = start_address + py_runtime_section->sh_addr - elf_load_addr; + if (section != NULL && first_load_segment != NULL) { + uintptr_t elf_load_addr = + first_load_segment->p_vaddr - ( + first_load_segment->p_vaddr % first_load_segment->p_align + ); + result = start_address + (uintptr_t)section->sh_addr - elf_load_addr; + } + else { + PyErr_Format(PyExc_KeyError, + "cannot find map for section %s", secname); } exit: @@ -355,10 +397,37 @@ get_py_runtime_linux(pid_t pid) } return result; } +#else +static uintptr_t +search_map_for_section(pid_t pid, const char* secname, const char* map) +{ + return 0; +} #endif -ssize_t -read_memory(pid_t pid, void* remote_address, size_t len, void* dst) +static uintptr_t +get_py_runtime(pid_t pid) +{ + uintptr_t address = search_map_for_section(pid, "PyRuntime", "libpython"); + if (address == 0) { + address = search_map_for_section(pid, "PyRuntime", "python"); + } + return address; +} + +static uintptr_t +get_async_debug(pid_t pid) +{ + uintptr_t result = search_map_for_section(pid, "AsyncioDebug", "_asyncio.cpython"); + if (result == 0 && !PyErr_Occurred()) { + PyErr_SetString(PyExc_RuntimeError, "Cannot find AsyncioDebug section"); + } + return result; +} + + +static ssize_t +read_memory(pid_t pid, uintptr_t remote_address, size_t len, void* dst) { ssize_t total_bytes_read = 0; #if defined(__linux__) && HAVE_PROCESS_VM_READV @@ -394,13 +463,19 @@ read_memory(pid_t pid, void* remote_address, size_t len, void* dst) if (kr != KERN_SUCCESS) { switch (kr) { case KERN_PROTECTION_FAILURE: - PyErr_SetString(PyExc_PermissionError, "Not enough permissions to read memory"); + PyErr_SetString( + PyExc_PermissionError, + "Not enough permissions to read memory"); break; case KERN_INVALID_ARGUMENT: - PyErr_SetString(PyExc_PermissionError, "Invalid argument to mach_vm_read_overwrite"); + PyErr_SetString( + PyExc_PermissionError, + "Invalid argument to mach_vm_read_overwrite"); break; default: - PyErr_SetString(PyExc_RuntimeError, "Unknown error reading memory"); + PyErr_SetString( + PyExc_RuntimeError, + "Unknown error reading memory"); } return -1; } @@ -411,13 +486,22 @@ read_memory(pid_t pid, void* remote_address, size_t len, void* dst) return total_bytes_read; } -int -read_string(pid_t pid, _Py_DebugOffsets* debug_offsets, void* address, char* buffer, Py_ssize_t size) -{ +static int +read_string( + pid_t pid, + _Py_DebugOffsets* debug_offsets, + uintptr_t address, + char* buffer, + Py_ssize_t size +) { Py_ssize_t len; - ssize_t bytes_read = - read_memory(pid, 
address + debug_offsets->unicode_object.length, sizeof(Py_ssize_t), &len); - if (bytes_read == -1) { + ssize_t bytes_read = read_memory( + pid, + address + debug_offsets->unicode_object.length, + sizeof(Py_ssize_t), + &len + ); + if (bytes_read < 0) { return -1; } if (len >= size) { @@ -426,51 +510,652 @@ read_string(pid_t pid, _Py_DebugOffsets* debug_offsets, void* address, char* buf } size_t offset = debug_offsets->unicode_object.asciiobject_size; bytes_read = read_memory(pid, address + offset, len, buffer); - if (bytes_read == -1) { + if (bytes_read < 0) { return -1; } buffer[len] = '\0'; return 0; } -void* -get_py_runtime(pid_t pid) + +static inline int +read_ptr(pid_t pid, uintptr_t address, uintptr_t *ptr_addr) { -#if defined(__linux__) - return get_py_runtime_linux(pid); -#elif defined(__APPLE__) && TARGET_OS_OSX - return get_py_runtime_macos(pid); -#else - return NULL; -#endif + int bytes_read = read_memory(pid, address, sizeof(void*), ptr_addr); + if (bytes_read < 0) { + return -1; + } + return 0; +} + +static inline int +read_ssize_t(pid_t pid, uintptr_t address, Py_ssize_t *size) +{ + int bytes_read = read_memory(pid, address, sizeof(Py_ssize_t), size); + if (bytes_read < 0) { + return -1; + } + return 0; } static int -parse_code_object( - int pid, - PyObject* result, - struct _Py_DebugOffsets* offsets, - void* address, - void** previous_frame) +read_py_ptr(pid_t pid, uintptr_t address, uintptr_t *ptr_addr) +{ + if (read_ptr(pid, address, ptr_addr)) { + return -1; + } + *ptr_addr &= ~Py_TAG_BITS; + return 0; +} + +static int +read_char(pid_t pid, uintptr_t address, char *result) +{ + int bytes_read = read_memory(pid, address, sizeof(char), result); + if (bytes_read < 0) { + return -1; + } + return 0; +} + +static int +read_int(pid_t pid, uintptr_t address, int *result) +{ + int bytes_read = read_memory(pid, address, sizeof(int), result); + if (bytes_read < 0) { + return -1; + } + return 0; +} + +static int +read_unsigned_long(pid_t pid, uintptr_t address, unsigned long *result) +{ + int bytes_read = read_memory(pid, address, sizeof(unsigned long), result); + if (bytes_read < 0) { + return -1; + } + return 0; +} + +static int +read_pyobj(pid_t pid, uintptr_t address, PyObject *ptr_addr) { - void* address_of_function_name; - read_memory( + int bytes_read = read_memory(pid, address, sizeof(PyObject), ptr_addr); + if (bytes_read < 0) { + return -1; + } + return 0; +} + +static PyObject * +read_py_str( + pid_t pid, + _Py_DebugOffsets* debug_offsets, + uintptr_t address, + ssize_t max_len +) { + assert(max_len > 0); + + PyObject *result = NULL; + + char *buf = (char *)PyMem_RawMalloc(max_len); + if (buf == NULL) { + PyErr_NoMemory(); + return NULL; + } + if (read_string(pid, debug_offsets, address, buf, max_len)) { + goto err; + } + + result = PyUnicode_FromString(buf); + if (result == NULL) { + goto err; + } + + PyMem_RawFree(buf); + assert(result != NULL); + return result; + +err: + PyMem_RawFree(buf); + return NULL; +} + +static long +read_py_long(pid_t pid, _Py_DebugOffsets* offsets, uintptr_t address) +{ + unsigned int shift = PYLONG_BITS_IN_DIGIT; + + ssize_t size; + uintptr_t lv_tag; + + int bytes_read = read_memory( + pid, address + offsets->long_object.lv_tag, + sizeof(uintptr_t), + &lv_tag); + if (bytes_read < 0) { + return -1; + } + + int negative = (lv_tag & 3) == 2; + size = lv_tag >> 3; + + if (size == 0) { + return 0; + } + + digit *digits = (digit *)PyMem_RawMalloc(size * sizeof(digit)); + if (!digits) { + PyErr_NoMemory(); + return -1; + } + + bytes_read = 
read_memory( + pid, + address + offsets->long_object.ob_digit, + sizeof(digit) * size, + digits + ); + if (bytes_read < 0) { + goto error; + } + + long value = 0; + + // In theory this can overflow, but because of llvm/llvm-project#16778 + // we can't use __builtin_mul_overflow because it fails to link with + // __muloti4 on aarch64. In practice this is fine because all we're + // testing here are task numbers that would fit in a single byte. + for (ssize_t i = 0; i < size; ++i) { + long long factor = digits[i] * (1UL << (ssize_t)(shift * i)); + value += factor; + } + PyMem_RawFree(digits); + if (negative) { + value *= -1; + } + return value; +error: + PyMem_RawFree(digits); + return -1; +} + +static PyObject * +parse_task_name( + int pid, + _Py_DebugOffsets* offsets, + struct _Py_AsyncioModuleDebugOffsets* async_offsets, + uintptr_t task_address +) { + uintptr_t task_name_addr; + int err = read_py_ptr( + pid, + task_address + async_offsets->asyncio_task_object.task_name, + &task_name_addr); + if (err) { + return NULL; + } + + // The task name can be a long or a string so we need to check the type + + PyObject task_name_obj; + err = read_pyobj( + pid, + task_name_addr, + &task_name_obj); + if (err) { + return NULL; + } + + unsigned long flags; + err = read_unsigned_long( + pid, + (uintptr_t)task_name_obj.ob_type + offsets->type_object.tp_flags, + &flags); + if (err) { + return NULL; + } + + if ((flags & Py_TPFLAGS_LONG_SUBCLASS)) { + long res = read_py_long(pid, offsets, task_name_addr); + if (res == -1) { + PyErr_SetString(PyExc_RuntimeError, "Failed to get task name"); + return NULL; + } + return PyUnicode_FromFormat("Task-%d", res); + } + + if(!(flags & Py_TPFLAGS_UNICODE_SUBCLASS)) { + PyErr_SetString(PyExc_RuntimeError, "Invalid task name object"); + return NULL; + } + + return read_py_str( + pid, + offsets, + task_name_addr, + 255 + ); +} + +static int +parse_coro_chain( + int pid, + struct _Py_DebugOffsets* offsets, + struct _Py_AsyncioModuleDebugOffsets* async_offsets, + uintptr_t coro_address, + PyObject *render_to +) { + assert((void*)coro_address != NULL); + + uintptr_t gen_type_addr; + int err = read_ptr( + pid, + coro_address + sizeof(void*), + &gen_type_addr); + if (err) { + return -1; + } + + uintptr_t gen_name_addr; + err = read_py_ptr( + pid, + coro_address + offsets->gen_object.gi_name, + &gen_name_addr); + if (err) { + return -1; + } + + PyObject *name = read_py_str( + pid, + offsets, + gen_name_addr, + 255 + ); + if (name == NULL) { + return -1; + } + + if (PyList_Append(render_to, name)) { + return -1; + } + Py_DECREF(name); + + int gi_frame_state; + err = read_int( + pid, + coro_address + offsets->gen_object.gi_frame_state, + &gi_frame_state); + + if (gi_frame_state == FRAME_SUSPENDED_YIELD_FROM) { + char owner; + err = read_char( pid, - (void*)(address + offsets->code_object.name), - sizeof(void*), - &address_of_function_name); + coro_address + offsets->gen_object.gi_iframe + + offsets->interpreter_frame.owner, + &owner + ); + if (err) { + return -1; + } + if (owner != FRAME_OWNED_BY_GENERATOR) { + PyErr_SetString( + PyExc_RuntimeError, + "generator doesn't own its frame \\_o_/"); + return -1; + } - if (address_of_function_name == NULL) { - PyErr_SetString(PyExc_RuntimeError, "No function name found"); + uintptr_t stackpointer_addr; + err = read_py_ptr( + pid, + coro_address + offsets->gen_object.gi_iframe + + offsets->interpreter_frame.stackpointer, + &stackpointer_addr); + if (err) { + return -1; + } + + if ((void*)stackpointer_addr != NULL) { + uintptr_t 
gi_await_addr; + err = read_py_ptr( + pid, + stackpointer_addr - sizeof(void*), + &gi_await_addr); + if (err) { + return -1; + } + + if ((void*)gi_await_addr != NULL) { + uintptr_t gi_await_addr_type_addr; + int err = read_ptr( + pid, + gi_await_addr + sizeof(void*), + &gi_await_addr_type_addr); + if (err) { + return -1; + } + + if (gen_type_addr == gi_await_addr_type_addr) { + /* This needs an explanation. We always start with parsing + native coroutine / generator frames. Ultimately they + are awaiting on something. That something can be + a native coroutine frame or... an iterator. + If it's the latter -- we can't continue building + our chain. So the condition to bail out of this is + to do that when the type of the current coroutine + doesn't match the type of whatever it points to + in its cr_await. + */ + err = parse_coro_chain( + pid, + offsets, + async_offsets, + gi_await_addr, + render_to + ); + if (err) { + return -1; + } + } + } + } + + } + + return 0; +} + + +static int +parse_task_awaited_by( + int pid, + struct _Py_DebugOffsets* offsets, + struct _Py_AsyncioModuleDebugOffsets* async_offsets, + uintptr_t task_address, + PyObject *awaited_by +); + + +static int +parse_task( + int pid, + struct _Py_DebugOffsets* offsets, + struct _Py_AsyncioModuleDebugOffsets* async_offsets, + uintptr_t task_address, + PyObject *render_to +) { + char is_task; + int err = read_char( + pid, + task_address + async_offsets->asyncio_task_object.task_is_task, + &is_task); + if (err) { + return -1; + } + + uintptr_t refcnt; + read_ptr(pid, task_address + sizeof(Py_ssize_t), &refcnt); + + PyObject* result = PyList_New(0); + if (result == NULL) { + return -1; + } + + PyObject *call_stack = PyList_New(0); + if (call_stack == NULL) { + goto err; + } + if (PyList_Append(result, call_stack)) { + Py_DECREF(call_stack); + goto err; + } + /* we can operate on a borrowed one to simplify cleanup */ + Py_DECREF(call_stack); + + if (is_task) { + PyObject *tn = parse_task_name( + pid, offsets, async_offsets, task_address); + if (tn == NULL) { + goto err; + } + if (PyList_Append(result, tn)) { + Py_DECREF(tn); + goto err; + } + Py_DECREF(tn); + + uintptr_t coro_addr; + err = read_py_ptr( + pid, + task_address + async_offsets->asyncio_task_object.task_coro, + &coro_addr); + if (err) { + goto err; + } + + if ((void*)coro_addr != NULL) { + err = parse_coro_chain( + pid, + offsets, + async_offsets, + coro_addr, + call_stack + ); + if (err) { + goto err; + } + + if (PyList_Reverse(call_stack)) { + goto err; + } + } + } + + if (PyList_Append(render_to, result)) { + goto err; + } + Py_DECREF(result); + + PyObject *awaited_by = PyList_New(0); + if (awaited_by == NULL) { + goto err; + } + if (PyList_Append(result, awaited_by)) { + Py_DECREF(awaited_by); + goto err; + } + /* we can operate on a borrowed one to simplify cleanup */ + Py_DECREF(awaited_by); + + if (parse_task_awaited_by(pid, offsets, async_offsets, + task_address, awaited_by) + ) { + goto err; + } + + return 0; + +err: + Py_DECREF(result); + return -1; +} + +static int +parse_tasks_in_set( + int pid, + struct _Py_DebugOffsets* offsets, + struct _Py_AsyncioModuleDebugOffsets* async_offsets, + uintptr_t set_addr, + PyObject *awaited_by +) { + uintptr_t set_obj; + if (read_py_ptr( + pid, + set_addr, + &set_obj) + ) { + return -1; + } + + Py_ssize_t num_els; + if (read_ssize_t( + pid, + set_obj + offsets->set_object.used, + &num_els) + ) { + return -1; + } + + Py_ssize_t set_len; + if (read_ssize_t( + pid, + set_obj + offsets->set_object.mask, + &set_len) + ) { + 
return -1; + } + set_len++; // The set contains the `mask+1` element slots. + + uintptr_t table_ptr; + if (read_ptr( + pid, + set_obj + offsets->set_object.table, + &table_ptr) + ) { + return -1; + } + + Py_ssize_t i = 0; + Py_ssize_t els = 0; + while (i < set_len) { + uintptr_t key_addr; + if (read_py_ptr(pid, table_ptr, &key_addr)) { + return -1; + } + + if ((void*)key_addr != NULL) { + Py_ssize_t ref_cnt; + if (read_ssize_t(pid, table_ptr, &ref_cnt)) { + return -1; + } + + if (ref_cnt) { + // if 'ref_cnt=0' it's a set dummy marker + + if (parse_task( + pid, + offsets, + async_offsets, + key_addr, + awaited_by) + ) { + return -1; + } + + if (++els == num_els) { + break; + } + } + } + + table_ptr += sizeof(void*) * 2; + i++; + } + return 0; +} + + +static int +parse_task_awaited_by( + int pid, + struct _Py_DebugOffsets* offsets, + struct _Py_AsyncioModuleDebugOffsets* async_offsets, + uintptr_t task_address, + PyObject *awaited_by +) { + uintptr_t task_ab_addr; + int err = read_py_ptr( + pid, + task_address + async_offsets->asyncio_task_object.task_awaited_by, + &task_ab_addr); + if (err) { + return -1; + } + + if ((void*)task_ab_addr == NULL) { + return 0; + } + + char awaited_by_is_a_set; + err = read_char( + pid, + task_address + async_offsets->asyncio_task_object.task_awaited_by_is_set, + &awaited_by_is_a_set); + if (err) { + return -1; + } + + if (awaited_by_is_a_set) { + if (parse_tasks_in_set( + pid, + offsets, + async_offsets, + task_address + async_offsets->asyncio_task_object.task_awaited_by, + awaited_by) + ) { + return -1; + } + } else { + uintptr_t sub_task; + if (read_py_ptr( + pid, + task_address + async_offsets->asyncio_task_object.task_awaited_by, + &sub_task) + ) { + return -1; + } + + if (parse_task( + pid, + offsets, + async_offsets, + sub_task, + awaited_by) + ) { + return -1; + } + } + + return 0; +} + +static int +parse_code_object( + int pid, + PyObject* result, + struct _Py_DebugOffsets* offsets, + uintptr_t address, + uintptr_t* previous_frame +) { + uintptr_t address_of_function_name; + int bytes_read = read_memory( + pid, + address + offsets->code_object.name, + sizeof(void*), + &address_of_function_name + ); + if (bytes_read < 0) { return -1; } - char function_name[256]; - if (read_string(pid, offsets, address_of_function_name, function_name, sizeof(function_name)) != 0) { + if ((void*)address_of_function_name == NULL) { + PyErr_SetString(PyExc_RuntimeError, "No function name found"); return -1; } - PyObject* py_function_name = PyUnicode_FromString(function_name); + PyObject* py_function_name = read_py_str( + pid, offsets, address_of_function_name, 256); if (py_function_name == NULL) { return -1; } @@ -486,54 +1171,283 @@ parse_code_object( static int parse_frame_object( - int pid, - PyObject* result, - struct _Py_DebugOffsets* offsets, - void* address, - void** previous_frame) -{ + int pid, + PyObject* result, + struct _Py_DebugOffsets* offsets, + uintptr_t address, + uintptr_t* previous_frame +) { + int err; + + ssize_t bytes_read = read_memory( + pid, + address + offsets->interpreter_frame.previous, + sizeof(void*), + previous_frame + ); + if (bytes_read < 0) { + return -1; + } + + char owner; + if (read_char(pid, address + offsets->interpreter_frame.owner, &owner)) { + return -1; + } + + if (owner >= FRAME_OWNED_BY_INTERPRETER) { + return 0; + } + + uintptr_t address_of_code_object; + err = read_py_ptr( + pid, + address + offsets->interpreter_frame.executable, + &address_of_code_object + ); + if (err) { + return -1; + } + + if 
((void*)address_of_code_object == NULL) { + return 0; + } + + return parse_code_object( + pid, result, offsets, address_of_code_object, previous_frame); +} + +static int +parse_async_frame_object( + int pid, + PyObject* result, + struct _Py_DebugOffsets* offsets, + uintptr_t address, + uintptr_t* previous_frame, + uintptr_t* code_object +) { + int err; + + ssize_t bytes_read = read_memory( + pid, + address + offsets->interpreter_frame.previous, + sizeof(void*), + previous_frame + ); + if (bytes_read < 0) { + return -1; + } + + char owner; + bytes_read = read_memory( + pid, address + offsets->interpreter_frame.owner, sizeof(char), &owner); + if (bytes_read < 0) { + return -1; + } + + if (owner == FRAME_OWNED_BY_CSTACK || owner == FRAME_OWNED_BY_INTERPRETER) { + return 0; // C frame + } + + if (owner != FRAME_OWNED_BY_GENERATOR + && owner != FRAME_OWNED_BY_THREAD) { + PyErr_Format(PyExc_RuntimeError, "Unhandled frame owner %d.\n", owner); + return -1; + } + + err = read_py_ptr( + pid, + address + offsets->interpreter_frame.executable, + code_object + ); + if (err) { + return -1; + } + + assert(code_object != NULL); + if ((void*)*code_object == NULL) { + return 0; + } + + if (parse_code_object( + pid, result, offsets, *code_object, previous_frame)) { + return -1; + } + + return 1; +} + +static int +read_offsets( + int pid, + uintptr_t *runtime_start_address, + _Py_DebugOffsets* debug_offsets +) { + *runtime_start_address = get_py_runtime(pid); + if ((void*)*runtime_start_address == NULL) { + if (!PyErr_Occurred()) { + PyErr_SetString( + PyExc_RuntimeError, "Failed to get .PyRuntime address"); + } + return -1; + } + size_t size = sizeof(struct _Py_DebugOffsets); + ssize_t bytes_read = read_memory( + pid, *runtime_start_address, size, debug_offsets); + if (bytes_read < 0) { + return -1; + } + return 0; +} + +static int +read_async_debug( + int pid, + struct _Py_AsyncioModuleDebugOffsets* async_debug +) { + uintptr_t async_debug_addr = get_async_debug(pid); + if (!async_debug_addr) { + return -1; + } + size_t size = sizeof(struct _Py_AsyncioModuleDebugOffsets); ssize_t bytes_read = read_memory( + pid, async_debug_addr, size, async_debug); + if (bytes_read < 0) { + return -1; + } + return 0; +} + +static int +find_running_frame( + int pid, + uintptr_t runtime_start_address, + _Py_DebugOffsets* local_debug_offsets, + uintptr_t *frame +) { + off_t interpreter_state_list_head = + local_debug_offsets->runtime_state.interpreters_head; + + uintptr_t address_of_interpreter_state; + int bytes_read = read_memory( pid, - (void*)(address + offsets->interpreter_frame.previous), + runtime_start_address + interpreter_state_list_head, sizeof(void*), - previous_frame); - if (bytes_read == -1) { + &address_of_interpreter_state); + if (bytes_read < 0) { return -1; } - char owner; - bytes_read = - read_memory(pid, (void*)(address + offsets->interpreter_frame.owner), sizeof(char), &owner); + if (address_of_interpreter_state == 0) { + PyErr_SetString(PyExc_RuntimeError, "No interpreter state found"); + return -1; + } + + uintptr_t address_of_thread; + bytes_read = read_memory( + pid, + address_of_interpreter_state + + local_debug_offsets->interpreter_state.threads_head, + sizeof(void*), + &address_of_thread); if (bytes_read < 0) { return -1; } - if (owner == FRAME_OWNED_BY_CSTACK) { + // No Python frames are available for us (can happen at tear-down). 
+ if ((void*)address_of_thread != NULL) { + int err = read_ptr( + pid, + address_of_thread + local_debug_offsets->thread_state.current_frame, + frame); + if (err) { + return -1; + } return 0; } - uintptr_t address_of_code_object; + *frame = (uintptr_t)NULL; + return 0; +} + +static int +find_running_task( + int pid, + uintptr_t runtime_start_address, + _Py_DebugOffsets *local_debug_offsets, + struct _Py_AsyncioModuleDebugOffsets *async_offsets, + uintptr_t *running_task_addr +) { + *running_task_addr = (uintptr_t)NULL; + + off_t interpreter_state_list_head = + local_debug_offsets->runtime_state.interpreters_head; + + uintptr_t address_of_interpreter_state; + int bytes_read = read_memory( + pid, + runtime_start_address + interpreter_state_list_head, + sizeof(void*), + &address_of_interpreter_state); + if (bytes_read < 0) { + return -1; + } + + if (address_of_interpreter_state == 0) { + PyErr_SetString(PyExc_RuntimeError, "No interpreter state found"); + return -1; + } + + uintptr_t address_of_thread; bytes_read = read_memory( pid, - (void*)(address + offsets->interpreter_frame.executable), + address_of_interpreter_state + + local_debug_offsets->interpreter_state.threads_head, sizeof(void*), - &address_of_code_object); + &address_of_thread); + if (bytes_read < 0) { + return -1; + } + + uintptr_t address_of_running_loop; + // No Python frames are available for us (can happen at tear-down). + if ((void*)address_of_thread == NULL) { + return 0; + } + + bytes_read = read_py_ptr( + pid, + address_of_thread + + async_offsets->asyncio_thread_state.asyncio_running_loop, + &address_of_running_loop); if (bytes_read == -1) { return -1; } - if (address_of_code_object == 0) { + // no asyncio loop is now running + if ((void*)address_of_running_loop == NULL) { return 0; } - address_of_code_object &= ~Py_TAG_BITS; - return parse_code_object(pid, result, offsets, (void *)address_of_code_object, previous_frame); + + int err = read_ptr( + pid, + address_of_thread + + async_offsets->asyncio_thread_state.asyncio_running_task, + running_task_addr); + if (err) { + return -1; + } + + return 0; } static PyObject* get_stack_trace(PyObject* self, PyObject* args) { -#if (!defined(__linux__) && !defined(__APPLE__)) || (defined(__linux__) && !HAVE_PROCESS_VM_READV) - PyErr_SetString(PyExc_RuntimeError, "get_stack_trace is not supported on this platform"); +#if (!defined(__linux__) && !defined(__APPLE__)) || \ + (defined(__linux__) && !HAVE_PROCESS_VM_READV) + PyErr_SetString( + PyExc_RuntimeError, + "get_stack_trace is not supported on this platform"); return NULL; #endif int pid; @@ -542,88 +1456,205 @@ get_stack_trace(PyObject* self, PyObject* args) return NULL; } - void* runtime_start_address = get_py_runtime(pid); - if (runtime_start_address == NULL) { - if (!PyErr_Occurred()) { - PyErr_SetString(PyExc_RuntimeError, "Failed to get .PyRuntime address"); - } + uintptr_t runtime_start_address = get_py_runtime(pid); + struct _Py_DebugOffsets local_debug_offsets; + + if (read_offsets(pid, &runtime_start_address, &local_debug_offsets)) { return NULL; } - size_t size = sizeof(struct _Py_DebugOffsets); - struct _Py_DebugOffsets local_debug_offsets; - ssize_t bytes_read = read_memory(pid, runtime_start_address, size, &local_debug_offsets); - if (bytes_read == -1) { + uintptr_t address_of_current_frame; + if (find_running_frame( + pid, runtime_start_address, &local_debug_offsets, + &address_of_current_frame) + ) { return NULL; } - off_t interpreter_state_list_head = local_debug_offsets.runtime_state.interpreters_head; - void* 
address_of_interpreter_state; - bytes_read = read_memory( - pid, - (void*)(runtime_start_address + interpreter_state_list_head), - sizeof(void*), - &address_of_interpreter_state); - if (bytes_read == -1) { + PyObject* result = PyList_New(0); + if (result == NULL) { return NULL; } - if (address_of_interpreter_state == NULL) { - PyErr_SetString(PyExc_RuntimeError, "No interpreter state found"); + while ((void*)address_of_current_frame != NULL) { + if (parse_frame_object( + pid, + result, + &local_debug_offsets, + address_of_current_frame, + &address_of_current_frame) + < 0) + { + Py_DECREF(result); + return NULL; + } + } + + return result; +} + +static PyObject* +get_async_stack_trace(PyObject* self, PyObject* args) +{ +#if (!defined(__linux__) && !defined(__APPLE__)) || \ + (defined(__linux__) && !HAVE_PROCESS_VM_READV) + PyErr_SetString( + PyExc_RuntimeError, + "get_stack_trace is not supported on this platform"); + return NULL; +#endif + int pid; + + if (!PyArg_ParseTuple(args, "i", &pid)) { return NULL; } - void* address_of_thread; - bytes_read = read_memory( - pid, - (void*)(address_of_interpreter_state + local_debug_offsets.interpreter_state.threads_head), - sizeof(void*), - &address_of_thread); - if (bytes_read == -1) { + uintptr_t runtime_start_address = get_py_runtime(pid); + struct _Py_DebugOffsets local_debug_offsets; + + if (read_offsets(pid, &runtime_start_address, &local_debug_offsets)) { return NULL; } - PyObject* result = PyList_New(0); + struct _Py_AsyncioModuleDebugOffsets local_async_debug; + if (read_async_debug(pid, &local_async_debug)) { + return NULL; + } + + PyObject* result = PyList_New(1); if (result == NULL) { return NULL; } + PyObject* calls = PyList_New(0); + if (calls == NULL) { + return NULL; + } + if (PyList_SetItem(result, 0, calls)) { /* steals ref to 'calls' */ + Py_DECREF(result); + Py_DECREF(calls); + return NULL; + } - // No Python frames are available for us (can happen at tear-down). 
- if (address_of_thread != NULL) { - void* address_of_current_frame; - (void)read_memory( - pid, - (void*)(address_of_thread + local_debug_offsets.thread_state.current_frame), - sizeof(void*), - &address_of_current_frame); - while (address_of_current_frame != NULL) { - if (parse_frame_object( - pid, - result, - &local_debug_offsets, - address_of_current_frame, - &address_of_current_frame) - < 0) - { - Py_DECREF(result); - return NULL; - } + uintptr_t running_task_addr = (uintptr_t)NULL; + if (find_running_task( + pid, runtime_start_address, &local_debug_offsets, &local_async_debug, + &running_task_addr) + ) { + goto result_err; + } + + if ((void*)running_task_addr == NULL) { + PyErr_SetString(PyExc_RuntimeError, "No running task found"); + goto result_err; + } + + uintptr_t running_coro_addr; + if (read_py_ptr( + pid, + running_task_addr + local_async_debug.asyncio_task_object.task_coro, + &running_coro_addr + )) { + goto result_err; + } + + if ((void*)running_coro_addr == NULL) { + PyErr_SetString(PyExc_RuntimeError, "Running task coro is NULL"); + goto result_err; + } + + // note: genobject's gi_iframe is an embedded struct so the address to + // the offset leads directly to its first field: f_executable + uintptr_t address_of_running_task_code_obj; + if (read_py_ptr( + pid, + running_coro_addr + local_debug_offsets.gen_object.gi_iframe, + &address_of_running_task_code_obj + )) { + goto result_err; + } + + if ((void*)address_of_running_task_code_obj == NULL) { + PyErr_SetString(PyExc_RuntimeError, "Running task code object is NULL"); + goto result_err; + } + + uintptr_t address_of_current_frame; + if (find_running_frame( + pid, runtime_start_address, &local_debug_offsets, + &address_of_current_frame) + ) { + goto result_err; + } + + uintptr_t address_of_code_object; + while ((void*)address_of_current_frame != NULL) { + int res = parse_async_frame_object( + pid, + calls, + &local_debug_offsets, + address_of_current_frame, + &address_of_current_frame, + &address_of_code_object + ); + + if (res < 0) { + goto result_err; + } + + if (address_of_code_object == address_of_running_task_code_obj) { + break; } } + PyObject *tn = parse_task_name( + pid, &local_debug_offsets, &local_async_debug, running_task_addr); + if (tn == NULL) { + goto result_err; + } + if (PyList_Append(result, tn)) { + Py_DECREF(tn); + goto result_err; + } + Py_DECREF(tn); + + PyObject* awaited_by = PyList_New(0); + if (awaited_by == NULL) { + goto result_err; + } + if (PyList_Append(result, awaited_by)) { + Py_DECREF(awaited_by); + goto result_err; + } + Py_DECREF(awaited_by); + + if (parse_task_awaited_by( + pid, &local_debug_offsets, &local_async_debug, + running_task_addr, awaited_by) + ) { + goto result_err; + } + return result; + +result_err: + Py_DECREF(result); + return NULL; } + static PyMethodDef methods[] = { - {"get_stack_trace", get_stack_trace, METH_VARARGS, "Get the Python stack from a given PID"}, - {NULL, NULL, 0, NULL}, + {"get_stack_trace", get_stack_trace, METH_VARARGS, + "Get the Python stack from a given PID"}, + {"get_async_stack_trace", get_async_stack_trace, METH_VARARGS, + "Get the asyncio stack from a given PID"}, + {NULL, NULL, 0, NULL}, }; static struct PyModuleDef module = { - .m_base = PyModuleDef_HEAD_INIT, - .m_name = "_testexternalinspection", - .m_size = -1, - .m_methods = methods, + .m_base = PyModuleDef_HEAD_INIT, + .m_name = "_testexternalinspection", + .m_size = -1, + .m_methods = methods, }; PyMODINIT_FUNC @@ -636,7 +1667,8 @@ PyInit__testexternalinspection(void) #ifdef Py_GIL_DISABLED 
PyUnstable_Module_SetGIL(mod, Py_MOD_GIL_NOT_USED); #endif - int rc = PyModule_AddIntConstant(mod, "PROCESS_VM_READV_SUPPORTED", HAVE_PROCESS_VM_READV); + int rc = PyModule_AddIntConstant( + mod, "PROCESS_VM_READV_SUPPORTED", HAVE_PROCESS_VM_READV); if (rc < 0) { Py_DECREF(mod); return NULL; diff --git a/Modules/_testinternalcapi.c b/Modules/_testinternalcapi.c index 150d34d168f5e4..e44b629897c58a 100644 --- a/Modules/_testinternalcapi.c +++ b/Modules/_testinternalcapi.c @@ -25,10 +25,8 @@ #include "pycore_hashtable.h" // _Py_hashtable_new() #include "pycore_initconfig.h" // _Py_GetConfigsAsDict() #include "pycore_instruction_sequence.h" // _PyInstructionSequence_New() -#include "pycore_interp.h" // _PyInterpreterState_GetConfigCopy() -#include "pycore_long.h" // _PyLong_Sign() #include "pycore_object.h" // _PyObject_IsFreed() -#include "pycore_optimizer.h" // _Py_UopsSymbol, etc. +#include "pycore_optimizer.h" // JitOptSymbol, etc. #include "pycore_pathconfig.h" // _PyPathConfig_ClearGlobal() #include "pycore_pyerrors.h" // _PyErr_ChainExceptions1() #include "pycore_pylifecycle.h" // _PyInterpreterConfig_AsDict() @@ -318,41 +316,6 @@ test_hashtable(PyObject *self, PyObject *Py_UNUSED(args)) } -static PyObject * -test_get_config(PyObject *Py_UNUSED(self), PyObject *Py_UNUSED(args)) -{ - PyConfig config; - PyConfig_InitIsolatedConfig(&config); - if (_PyInterpreterState_GetConfigCopy(&config) < 0) { - PyConfig_Clear(&config); - return NULL; - } - PyObject *dict = _PyConfig_AsDict(&config); - PyConfig_Clear(&config); - return dict; -} - - -static PyObject * -test_set_config(PyObject *Py_UNUSED(self), PyObject *dict) -{ - PyConfig config; - PyConfig_InitIsolatedConfig(&config); - if (_PyConfig_FromDict(&config, dict) < 0) { - goto error; - } - if (_PyInterpreterState_SetConfig(&config) < 0) { - goto error; - } - PyConfig_Clear(&config); - Py_RETURN_NONE; - -error: - PyConfig_Clear(&config); - return NULL; -} - - static PyObject * test_reset_path_config(PyObject *Py_UNUSED(self), PyObject *Py_UNUSED(arg)) { @@ -987,44 +950,13 @@ get_co_framesize(PyObject *self, PyObject *arg) return PyLong_FromLong(code->co_framesize); } -#ifdef _Py_TIER2 - -static PyObject * -new_counter_optimizer(PyObject *self, PyObject *arg) -{ - return _PyOptimizer_NewCounter(); -} - static PyObject * -new_uop_optimizer(PyObject *self, PyObject *arg) +jit_enabled(PyObject *self, PyObject *arg) { - return _PyOptimizer_NewUOpOptimizer(); + return PyBool_FromLong(_PyInterpreterState_GET()->jit); } -static PyObject * -set_optimizer(PyObject *self, PyObject *opt) -{ - if (opt == Py_None) { - opt = NULL; - } - if (_Py_SetTier2Optimizer((_PyOptimizerObject*)opt) < 0) { - return NULL; - } - Py_RETURN_NONE; -} - -static PyObject * -get_optimizer(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - PyObject *opt = NULL; #ifdef _Py_TIER2 - opt = (PyObject *)_Py_GetOptimizer(); -#endif - if (opt == NULL) { - Py_RETURN_NONE; - } - return opt; -} static PyObject * add_executor_dependency(PyObject *self, PyObject *args) @@ -1034,12 +966,6 @@ add_executor_dependency(PyObject *self, PyObject *args) if (!PyArg_ParseTuple(args, "OO", &exec, &obj)) { return NULL; } - /* No way to tell in general if exec is an executor, so we only accept - * counting_executor */ - if (strcmp(Py_TYPE(exec)->tp_name, "counting_executor")) { - PyErr_SetString(PyExc_TypeError, "argument must be a counting_executor"); - return NULL; - } _Py_Executor_DependsOn((_PyExecutorObject *)exec, obj); Py_RETURN_NONE; } @@ -1846,14 +1772,14 @@ 
_testinternalcapi_test_long_numbits_impl(PyObject *module) for (i = 0; i < Py_ARRAY_LENGTH(testcases); ++i) { uint64_t nbits; - int sign; + int sign = -7; PyObject *plong; plong = PyLong_FromLong(testcases[i].input); if (plong == NULL) return NULL; nbits = _PyLong_NumBits(plong); - sign = _PyLong_Sign(plong); + (void)PyLong_GetSign(plong, &sign); Py_DECREF(plong); if (nbits != testcases[i].nbits) @@ -1861,7 +1787,7 @@ _testinternalcapi_test_long_numbits_impl(PyObject *module) "wrong result for _PyLong_NumBits"); if (sign != testcases[i].sign) return raiseTestError("test_long_numbits", - "wrong result for _PyLong_Sign"); + "wrong result for PyLong_GetSign()"); } Py_RETURN_NONE; } @@ -2074,8 +2000,6 @@ static PyMethodDef module_functions[] = { {"test_popcount", test_popcount, METH_NOARGS}, {"test_bit_length", test_bit_length, METH_NOARGS}, {"test_hashtable", test_hashtable, METH_NOARGS}, - {"get_config", test_get_config, METH_NOARGS}, - {"set_config", test_set_config, METH_O}, {"reset_path_config", test_reset_path_config, METH_NOARGS}, {"test_edit_cost", test_edit_cost, METH_NOARGS}, {"test_bytes_find", test_bytes_find, METH_NOARGS}, @@ -2098,11 +2022,8 @@ static PyMethodDef module_functions[] = { {"iframe_getline", iframe_getline, METH_O, NULL}, {"iframe_getlasti", iframe_getlasti, METH_O, NULL}, {"get_co_framesize", get_co_framesize, METH_O, NULL}, + {"jit_enabled", jit_enabled, METH_NOARGS, NULL}, #ifdef _Py_TIER2 - {"get_optimizer", get_optimizer, METH_NOARGS, NULL}, - {"set_optimizer", set_optimizer, METH_O, NULL}, - {"new_counter_optimizer", new_counter_optimizer, METH_NOARGS, NULL}, - {"new_uop_optimizer", new_uop_optimizer, METH_NOARGS, NULL}, {"add_executor_dependency", add_executor_dependency, METH_VARARGS, NULL}, {"invalidate_executors", invalidate_executors, METH_O, NULL}, #endif @@ -2217,6 +2138,21 @@ module_exec(PyObject *module) return 1; } + if (PyModule_Add(module, "SPECIALIZATION_THRESHOLD", + PyLong_FromLong(ADAPTIVE_WARMUP_VALUE + 1)) < 0) { + return 1; + } + + if (PyModule_Add(module, "SPECIALIZATION_COOLDOWN", + PyLong_FromLong(ADAPTIVE_COOLDOWN_VALUE + 1)) < 0) { + return 1; + } + + if (PyModule_Add(module, "SHARED_KEYS_MAX_SIZE", + PyLong_FromLong(SHARED_KEYS_MAX_SIZE)) < 0) { + return 1; + } + return 0; } diff --git a/Modules/_testlimitedcapi.c b/Modules/_testlimitedcapi.c index ba83a23117b2a5..4dae99ec92a085 100644 --- a/Modules/_testlimitedcapi.c +++ b/Modules/_testlimitedcapi.c @@ -56,6 +56,9 @@ PyInit__testlimitedcapi(void) if (_PyTestLimitedCAPI_Init_HeaptypeRelative(mod) < 0) { return NULL; } + if (_PyTestLimitedCAPI_Init_Import(mod) < 0) { + return NULL; + } if (_PyTestLimitedCAPI_Init_List(mod) < 0) { return NULL; } @@ -83,5 +86,11 @@ PyInit__testlimitedcapi(void) if (_PyTestLimitedCAPI_Init_VectorcallLimited(mod) < 0) { return NULL; } + if (_PyTestLimitedCAPI_Init_Version(mod) < 0) { + return NULL; + } + if (_PyTestLimitedCAPI_Init_File(mod) < 0) { + return NULL; + } return mod; } diff --git a/Modules/_testlimitedcapi/clinic/file.c.h b/Modules/_testlimitedcapi/clinic/file.c.h new file mode 100644 index 00000000000000..663619eead2a3a --- /dev/null +++ b/Modules/_testlimitedcapi/clinic/file.c.h @@ -0,0 +1,81 @@ +/*[clinic input] +preserve +[clinic start generated code]*/ + +PyDoc_STRVAR(_testcapi_pyfile_getline__doc__, +"pyfile_getline($module, file, n, /)\n" +"--\n" +"\n"); + +#define _TESTCAPI_PYFILE_GETLINE_METHODDEF \ + {"pyfile_getline", (PyCFunction)(void(*)(void))_testcapi_pyfile_getline, METH_FASTCALL, _testcapi_pyfile_getline__doc__}, + +static 
PyObject * +_testcapi_pyfile_getline_impl(PyObject *module, PyObject *file, int n); + +static PyObject * +_testcapi_pyfile_getline(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + PyObject *file; + int n; + + if (nargs != 2) { + PyErr_Format(PyExc_TypeError, "pyfile_getline expected 2 arguments, got %zd", nargs); + goto exit; + } + file = args[0]; + n = PyLong_AsInt(args[1]); + if (n == -1 && PyErr_Occurred()) { + goto exit; + } + return_value = _testcapi_pyfile_getline_impl(module, file, n); + +exit: + return return_value; +} + +PyDoc_STRVAR(_testcapi_pyfile_writeobject__doc__, +"pyfile_writeobject($module, obj, file, flags, /)\n" +"--\n" +"\n"); + +#define _TESTCAPI_PYFILE_WRITEOBJECT_METHODDEF \ + {"pyfile_writeobject", (PyCFunction)(void(*)(void))_testcapi_pyfile_writeobject, METH_FASTCALL, _testcapi_pyfile_writeobject__doc__}, + +static PyObject * +_testcapi_pyfile_writeobject_impl(PyObject *module, PyObject *obj, + PyObject *file, int flags); + +static PyObject * +_testcapi_pyfile_writeobject(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + PyObject *obj; + PyObject *file; + int flags; + + if (nargs != 3) { + PyErr_Format(PyExc_TypeError, "pyfile_writeobject expected 3 arguments, got %zd", nargs); + goto exit; + } + obj = args[0]; + file = args[1]; + flags = PyLong_AsInt(args[2]); + if (flags == -1 && PyErr_Occurred()) { + goto exit; + } + return_value = _testcapi_pyfile_writeobject_impl(module, obj, file, flags); + +exit: + return return_value; +} + +PyDoc_STRVAR(_testcapi_pyobject_asfiledescriptor__doc__, +"pyobject_asfiledescriptor($module, obj, /)\n" +"--\n" +"\n"); + +#define _TESTCAPI_PYOBJECT_ASFILEDESCRIPTOR_METHODDEF \ + {"pyobject_asfiledescriptor", (PyCFunction)_testcapi_pyobject_asfiledescriptor, METH_O, _testcapi_pyobject_asfiledescriptor__doc__}, +/*[clinic end generated code: output=ea572aaaa01aec7b input=a9049054013a1b77]*/ diff --git a/Modules/_testlimitedcapi/clinic/version.c.h b/Modules/_testlimitedcapi/clinic/version.c.h new file mode 100644 index 00000000000000..096c7dd528b332 --- /dev/null +++ b/Modules/_testlimitedcapi/clinic/version.c.h @@ -0,0 +1,93 @@ +/*[clinic input] +preserve +[clinic start generated code]*/ + +PyDoc_STRVAR(_testlimitedcapi_pack_full_version__doc__, +"pack_full_version($module, major, minor, micro, level, serial, /)\n" +"--\n" +"\n"); + +#define _TESTLIMITEDCAPI_PACK_FULL_VERSION_METHODDEF \ + {"pack_full_version", (PyCFunction)(void(*)(void))_testlimitedcapi_pack_full_version, METH_FASTCALL, _testlimitedcapi_pack_full_version__doc__}, + +static PyObject * +_testlimitedcapi_pack_full_version_impl(PyObject *module, int major, + int minor, int micro, int level, + int serial); + +static PyObject * +_testlimitedcapi_pack_full_version(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + int major; + int minor; + int micro; + int level; + int serial; + + if (nargs != 5) { + PyErr_Format(PyExc_TypeError, "pack_full_version expected 5 arguments, got %zd", nargs); + goto exit; + } + major = PyLong_AsInt(args[0]); + if (major == -1 && PyErr_Occurred()) { + goto exit; + } + minor = PyLong_AsInt(args[1]); + if (minor == -1 && PyErr_Occurred()) { + goto exit; + } + micro = PyLong_AsInt(args[2]); + if (micro == -1 && PyErr_Occurred()) { + goto exit; + } + level = PyLong_AsInt(args[3]); + if (level == -1 && PyErr_Occurred()) { + goto exit; + } + serial = PyLong_AsInt(args[4]); + if (serial == -1 && PyErr_Occurred()) 
{ + goto exit; + } + return_value = _testlimitedcapi_pack_full_version_impl(module, major, minor, micro, level, serial); + +exit: + return return_value; +} + +PyDoc_STRVAR(_testlimitedcapi_pack_version__doc__, +"pack_version($module, major, minor, /)\n" +"--\n" +"\n"); + +#define _TESTLIMITEDCAPI_PACK_VERSION_METHODDEF \ + {"pack_version", (PyCFunction)(void(*)(void))_testlimitedcapi_pack_version, METH_FASTCALL, _testlimitedcapi_pack_version__doc__}, + +static PyObject * +_testlimitedcapi_pack_version_impl(PyObject *module, int major, int minor); + +static PyObject * +_testlimitedcapi_pack_version(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + int major; + int minor; + + if (nargs != 2) { + PyErr_Format(PyExc_TypeError, "pack_version expected 2 arguments, got %zd", nargs); + goto exit; + } + major = PyLong_AsInt(args[0]); + if (major == -1 && PyErr_Occurred()) { + goto exit; + } + minor = PyLong_AsInt(args[1]); + if (minor == -1 && PyErr_Occurred()) { + goto exit; + } + return_value = _testlimitedcapi_pack_version_impl(module, major, minor); + +exit: + return return_value; +} +/*[clinic end generated code: output=aed3e226da77f2d2 input=a9049054013a1b77]*/ diff --git a/Modules/_testlimitedcapi/file.c b/Modules/_testlimitedcapi/file.c new file mode 100644 index 00000000000000..e082e3c6700ee7 --- /dev/null +++ b/Modules/_testlimitedcapi/file.c @@ -0,0 +1,128 @@ +#include "pyconfig.h" // Py_GIL_DISABLED +#ifndef Py_GIL_DISABLED + // Need limited C API 3.13 for PyLong_AsInt() +# define Py_LIMITED_API 0x030d0000 +#endif + +#include "parts.h" +#include "util.h" +#include "clinic/file.c.h" + + +/*[clinic input] +module _testcapi +[clinic start generated code]*/ +/*[clinic end generated code: output=da39a3ee5e6b4b0d input=6361033e795369fc]*/ + + +static PyObject * +pyfile_fromfd(PyObject *module, PyObject *args) +{ + int fd; + const char *name; + Py_ssize_t size; + const char *mode; + int buffering; + const char *encoding; + const char *errors; + const char *newline; + int closefd; + if (!PyArg_ParseTuple(args, + "iz#z#" + "iz#z#" + "z#i", + &fd, &name, &size, &mode, &size, + &buffering, &encoding, &size, &errors, &size, + &newline, &size, &closefd)) { + return NULL; + } + + return PyFile_FromFd(fd, name, mode, buffering, + encoding, errors, newline, closefd); +} + + +/*[clinic input] +_testcapi.pyfile_getline + + file: object + n: int + / + +[clinic start generated code]*/ + +static PyObject * +_testcapi_pyfile_getline_impl(PyObject *module, PyObject *file, int n) +/*[clinic end generated code: output=137fde2774563266 input=df26686148b3657e]*/ +{ + return PyFile_GetLine(file, n); +} + + +/*[clinic input] +_testcapi.pyfile_writeobject + + obj: object + file: object + flags: int + / + +[clinic start generated code]*/ + +static PyObject * +_testcapi_pyfile_writeobject_impl(PyObject *module, PyObject *obj, + PyObject *file, int flags) +/*[clinic end generated code: output=ebb4d802e3db489c input=64a34a3e75b9935a]*/ +{ + NULLABLE(obj); + NULLABLE(file); + RETURN_INT(PyFile_WriteObject(obj, file, flags)); +} + + +static PyObject * +pyfile_writestring(PyObject *module, PyObject *args) +{ + const char *str; + Py_ssize_t size; + PyObject *file; + if (!PyArg_ParseTuple(args, "z#O", &str, &size, &file)) { + return NULL; + } + NULLABLE(file); + + RETURN_INT(PyFile_WriteString(str, file)); +} + + +/*[clinic input] +_testcapi.pyobject_asfiledescriptor + + obj: object + / + +[clinic start generated code]*/ + +static PyObject * 
+_testcapi_pyobject_asfiledescriptor(PyObject *module, PyObject *obj) +/*[clinic end generated code: output=2d640c6a1970c721 input=45fa1171d62b18d7]*/ +{ + NULLABLE(obj); + RETURN_INT(PyObject_AsFileDescriptor(obj)); +} + + +static PyMethodDef test_methods[] = { + {"pyfile_fromfd", pyfile_fromfd, METH_VARARGS}, + _TESTCAPI_PYFILE_GETLINE_METHODDEF + _TESTCAPI_PYFILE_WRITEOBJECT_METHODDEF + {"pyfile_writestring", pyfile_writestring, METH_VARARGS}, + _TESTCAPI_PYOBJECT_ASFILEDESCRIPTOR_METHODDEF + {NULL}, +}; + +int +_PyTestLimitedCAPI_Init_File(PyObject *m) +{ + return PyModule_AddFunctions(m, test_methods); +} diff --git a/Modules/_testlimitedcapi/import.c b/Modules/_testlimitedcapi/import.c new file mode 100644 index 00000000000000..3707dbedeea0d9 --- /dev/null +++ b/Modules/_testlimitedcapi/import.c @@ -0,0 +1,306 @@ +// Need limited C API version 3.13 for PyImport_AddModuleRef() +#include "pyconfig.h" // Py_GIL_DISABLED +#if !defined(Py_GIL_DISABLED) && !defined(Py_LIMITED_API) +# define Py_LIMITED_API 0x030d0000 +#endif + +#include "parts.h" +#include "util.h" + + +/* Test PyImport_GetMagicNumber() */ +static PyObject * +pyimport_getmagicnumber(PyObject *Py_UNUSED(module), PyObject *Py_UNUSED(args)) +{ + long magic = PyImport_GetMagicNumber(); + return PyLong_FromLong(magic); +} + + +/* Test PyImport_GetMagicTag() */ +static PyObject * +pyimport_getmagictag(PyObject *Py_UNUSED(module), PyObject *Py_UNUSED(args)) +{ + const char *tag = PyImport_GetMagicTag(); + return PyUnicode_FromString(tag); +} + + +/* Test PyImport_GetModuleDict() */ +static PyObject * +pyimport_getmoduledict(PyObject *Py_UNUSED(module), PyObject *Py_UNUSED(args)) +{ + return Py_XNewRef(PyImport_GetModuleDict()); +} + + +/* Test PyImport_GetModule() */ +static PyObject * +pyimport_getmodule(PyObject *Py_UNUSED(module), PyObject *name) +{ + assert(!PyErr_Occurred()); + NULLABLE(name); + PyObject *module = PyImport_GetModule(name); + if (module == NULL && !PyErr_Occurred()) { + return Py_NewRef(PyExc_KeyError); + } + return module; +} + + +/* Test PyImport_AddModuleObject() */ +static PyObject * +pyimport_addmoduleobject(PyObject *Py_UNUSED(module), PyObject *name) +{ + NULLABLE(name); + return Py_XNewRef(PyImport_AddModuleObject(name)); +} + + +/* Test PyImport_AddModule() */ +static PyObject * +pyimport_addmodule(PyObject *Py_UNUSED(module), PyObject *args) +{ + const char *name; + Py_ssize_t size; + if (!PyArg_ParseTuple(args, "z#", &name, &size)) { + return NULL; + } + + return Py_XNewRef(PyImport_AddModule(name)); +} + + +/* Test PyImport_AddModuleRef() */ +static PyObject * +pyimport_addmoduleref(PyObject *Py_UNUSED(module), PyObject *args) +{ + const char *name; + Py_ssize_t size; + if (!PyArg_ParseTuple(args, "z#", &name, &size)) { + return NULL; + } + + return PyImport_AddModuleRef(name); +} + + +/* Test PyImport_Import() */ +static PyObject * +pyimport_import(PyObject *Py_UNUSED(module), PyObject *name) +{ + NULLABLE(name); + return PyImport_Import(name); +} + + +/* Test PyImport_ImportModule() */ +static PyObject * +pyimport_importmodule(PyObject *Py_UNUSED(module), PyObject *args) +{ + const char *name; + Py_ssize_t size; + if (!PyArg_ParseTuple(args, "z#", &name, &size)) { + return NULL; + } + + return PyImport_ImportModule(name); +} + + +/* Test PyImport_ImportModuleNoBlock() */ +static PyObject * +pyimport_importmodulenoblock(PyObject *Py_UNUSED(module), PyObject *args) +{ + const char *name; + Py_ssize_t size; + if (!PyArg_ParseTuple(args, "z#", &name, &size)) { + return NULL; + } + + 
_Py_COMP_DIAG_PUSH + _Py_COMP_DIAG_IGNORE_DEPR_DECLS + return PyImport_ImportModuleNoBlock(name); + _Py_COMP_DIAG_POP +} + + +/* Test PyImport_ImportModuleEx() */ +static PyObject * +pyimport_importmoduleex(PyObject *Py_UNUSED(module), PyObject *args) +{ + const char *name; + Py_ssize_t size; + PyObject *globals, *locals, *fromlist; + if (!PyArg_ParseTuple(args, "z#OOO", + &name, &size, &globals, &locals, &fromlist)) { + return NULL; + } + NULLABLE(globals); + NULLABLE(locals); + NULLABLE(fromlist); + + return PyImport_ImportModuleEx(name, globals, locals, fromlist); +} + + +/* Test PyImport_ImportModuleLevel() */ +static PyObject * +pyimport_importmodulelevel(PyObject *Py_UNUSED(module), PyObject *args) +{ + const char *name; + Py_ssize_t size; + PyObject *globals, *locals, *fromlist; + int level; + if (!PyArg_ParseTuple(args, "z#OOOi", + &name, &size, &globals, &locals, &fromlist, &level)) { + return NULL; + } + NULLABLE(globals); + NULLABLE(locals); + NULLABLE(fromlist); + + return PyImport_ImportModuleLevel(name, globals, locals, fromlist, level); +} + + +/* Test PyImport_ImportModuleLevelObject() */ +static PyObject * +pyimport_importmodulelevelobject(PyObject *Py_UNUSED(module), PyObject *args) +{ + PyObject *name, *globals, *locals, *fromlist; + int level; + if (!PyArg_ParseTuple(args, "OOOOi", + &name, &globals, &locals, &fromlist, &level)) { + return NULL; + } + NULLABLE(name); + NULLABLE(globals); + NULLABLE(locals); + NULLABLE(fromlist); + + return PyImport_ImportModuleLevelObject(name, globals, locals, fromlist, level); +} + + +/* Test PyImport_ImportFrozenModule() */ +static PyObject * +pyimport_importfrozenmodule(PyObject *Py_UNUSED(module), PyObject *args) +{ + const char *name; + Py_ssize_t size; + if (!PyArg_ParseTuple(args, "z#", &name, &size)) { + return NULL; + } + + RETURN_INT(PyImport_ImportFrozenModule(name)); +} + + +/* Test PyImport_ImportFrozenModuleObject() */ +static PyObject * +pyimport_importfrozenmoduleobject(PyObject *Py_UNUSED(module), PyObject *name) +{ + NULLABLE(name); + RETURN_INT(PyImport_ImportFrozenModuleObject(name)); +} + + +/* Test PyImport_ExecCodeModule() */ +static PyObject * +pyimport_executecodemodule(PyObject *Py_UNUSED(module), PyObject *args) +{ + const char *name; + Py_ssize_t size; + PyObject *code; + if (!PyArg_ParseTuple(args, "z#O", &name, &size, &code)) { + return NULL; + } + NULLABLE(code); + + return PyImport_ExecCodeModule(name, code); +} + + +/* Test PyImport_ExecCodeModuleEx() */ +static PyObject * +pyimport_executecodemoduleex(PyObject *Py_UNUSED(module), PyObject *args) +{ + const char *name; + Py_ssize_t size; + PyObject *code; + const char *pathname; + if (!PyArg_ParseTuple(args, "z#Oz#", &name, &size, &code, &pathname, &size)) { + return NULL; + } + NULLABLE(code); + + return PyImport_ExecCodeModuleEx(name, code, pathname); +} + + +/* Test PyImport_ExecCodeModuleWithPathnames() */ +static PyObject * +pyimport_executecodemodulewithpathnames(PyObject *Py_UNUSED(module), PyObject *args) +{ + const char *name; + Py_ssize_t size; + PyObject *code; + const char *pathname; + const char *cpathname; + if (!PyArg_ParseTuple(args, "z#Oz#z#", &name, &size, &code, &pathname, &size, &cpathname, &size)) { + return NULL; + } + NULLABLE(code); + + return PyImport_ExecCodeModuleWithPathnames(name, code, + pathname, cpathname); +} + + +/* Test PyImport_ExecCodeModuleObject() */ +static PyObject * +pyimport_executecodemoduleobject(PyObject *Py_UNUSED(module), PyObject *args) +{ + PyObject *name, *code, *pathname, *cpathname; + if 
(!PyArg_ParseTuple(args, "OOOO", &name, &code, &pathname, &cpathname)) { + return NULL; + } + NULLABLE(name); + NULLABLE(code); + NULLABLE(pathname); + NULLABLE(cpathname); + + return PyImport_ExecCodeModuleObject(name, code, pathname, cpathname); +} + + +static PyMethodDef test_methods[] = { + {"PyImport_GetMagicNumber", pyimport_getmagicnumber, METH_NOARGS}, + {"PyImport_GetMagicTag", pyimport_getmagictag, METH_NOARGS}, + {"PyImport_GetModuleDict", pyimport_getmoduledict, METH_NOARGS}, + {"PyImport_GetModule", pyimport_getmodule, METH_O}, + {"PyImport_AddModuleObject", pyimport_addmoduleobject, METH_O}, + {"PyImport_AddModule", pyimport_addmodule, METH_VARARGS}, + {"PyImport_AddModuleRef", pyimport_addmoduleref, METH_VARARGS}, + {"PyImport_Import", pyimport_import, METH_O}, + {"PyImport_ImportModule", pyimport_importmodule, METH_VARARGS}, + {"PyImport_ImportModuleNoBlock", pyimport_importmodulenoblock, METH_VARARGS}, + {"PyImport_ImportModuleEx", pyimport_importmoduleex, METH_VARARGS}, + {"PyImport_ImportModuleLevel", pyimport_importmodulelevel, METH_VARARGS}, + {"PyImport_ImportModuleLevelObject", pyimport_importmodulelevelobject, METH_VARARGS}, + {"PyImport_ImportFrozenModule", pyimport_importfrozenmodule, METH_VARARGS}, + {"PyImport_ImportFrozenModuleObject", pyimport_importfrozenmoduleobject, METH_O}, + {"PyImport_ExecCodeModule", pyimport_executecodemodule, METH_VARARGS}, + {"PyImport_ExecCodeModuleEx", pyimport_executecodemoduleex, METH_VARARGS}, + {"PyImport_ExecCodeModuleWithPathnames", pyimport_executecodemodulewithpathnames, METH_VARARGS}, + {"PyImport_ExecCodeModuleObject", pyimport_executecodemoduleobject, METH_VARARGS}, + {NULL}, +}; + + +int +_PyTestLimitedCAPI_Init_Import(PyObject *module) +{ + return PyModule_AddFunctions(module, test_methods); +} diff --git a/Modules/_testlimitedcapi/parts.h b/Modules/_testlimitedcapi/parts.h index 4107b150c5b4e0..60f6f03011a65c 100644 --- a/Modules/_testlimitedcapi/parts.h +++ b/Modules/_testlimitedcapi/parts.h @@ -31,6 +31,7 @@ int _PyTestLimitedCAPI_Init_Dict(PyObject *module); int _PyTestLimitedCAPI_Init_Eval(PyObject *module); int _PyTestLimitedCAPI_Init_Float(PyObject *module); int _PyTestLimitedCAPI_Init_HeaptypeRelative(PyObject *module); +int _PyTestLimitedCAPI_Init_Import(PyObject *module); int _PyTestLimitedCAPI_Init_Object(PyObject *module); int _PyTestLimitedCAPI_Init_List(PyObject *module); int _PyTestLimitedCAPI_Init_Long(PyObject *module); @@ -40,5 +41,7 @@ int _PyTestLimitedCAPI_Init_Sys(PyObject *module); int _PyTestLimitedCAPI_Init_Tuple(PyObject *module); int _PyTestLimitedCAPI_Init_Unicode(PyObject *module); int _PyTestLimitedCAPI_Init_VectorcallLimited(PyObject *module); +int _PyTestLimitedCAPI_Init_Version(PyObject *module); +int _PyTestLimitedCAPI_Init_File(PyObject *module); #endif // Py_TESTLIMITEDCAPI_PARTS_H diff --git a/Modules/_testlimitedcapi/version.c b/Modules/_testlimitedcapi/version.c new file mode 100644 index 00000000000000..57cd6e4e928ea3 --- /dev/null +++ b/Modules/_testlimitedcapi/version.c @@ -0,0 +1,77 @@ +/* Test version macros in the limited API */ + +#include "pyconfig.h" // Py_GIL_DISABLED +#ifndef Py_GIL_DISABLED +# define Py_LIMITED_API 0x030e0000 // Added in 3.14 +#endif + +#include "parts.h" +#include "clinic/version.c.h" +#include + +/*[clinic input] +module _testlimitedcapi +[clinic start generated code]*/ +/*[clinic end generated code: output=da39a3ee5e6b4b0d input=2700057f9c1135ba]*/ + +/*[clinic input] +_testlimitedcapi.pack_full_version + + major: int + minor: int + micro: int + 
level: int + serial: int + / +[clinic start generated code]*/ + +static PyObject * +_testlimitedcapi_pack_full_version_impl(PyObject *module, int major, + int minor, int micro, int level, + int serial) +/*[clinic end generated code: output=b87a1e9805648861 input=2a304423be61d2ac]*/ +{ + uint32_t macro_result = Py_PACK_FULL_VERSION( + major, minor, micro, level, serial); +#undef Py_PACK_FULL_VERSION + uint32_t func_result = Py_PACK_FULL_VERSION( + major, minor, micro, level, serial); + + assert(macro_result == func_result); + return PyLong_FromUnsignedLong((unsigned long)func_result); +} + +/*[clinic input] +_testlimitedcapi.pack_version + + major: int + minor: int + / +[clinic start generated code]*/ + +static PyObject * +_testlimitedcapi_pack_version_impl(PyObject *module, int major, int minor) +/*[clinic end generated code: output=771247bbd06e7883 input=3e39e9dcbc09e86a]*/ +{ + uint32_t macro_result = Py_PACK_VERSION(major, minor); +#undef Py_PACK_VERSION + uint32_t func_result = Py_PACK_VERSION(major, minor); + + assert(macro_result == func_result); + return PyLong_FromUnsignedLong((unsigned long)func_result); +} + +static PyMethodDef TestMethods[] = { + _TESTLIMITEDCAPI_PACK_FULL_VERSION_METHODDEF + _TESTLIMITEDCAPI_PACK_VERSION_METHODDEF + {NULL}, +}; + +int +_PyTestLimitedCAPI_Init_Version(PyObject *m) +{ + if (PyModule_AddFunctions(m, TestMethods) < 0) { + return -1; + } + return 0; +} diff --git a/Modules/_threadmodule.c b/Modules/_threadmodule.c index 2cbdfeb09b95ae..e251736fb36aa9 100644 --- a/Modules/_threadmodule.c +++ b/Modules/_threadmodule.c @@ -47,6 +47,14 @@ get_thread_state(PyObject *module) } +#ifdef MS_WINDOWS +typedef HRESULT (WINAPI *PF_GET_THREAD_DESCRIPTION)(HANDLE, PCWSTR*); +typedef HRESULT (WINAPI *PF_SET_THREAD_DESCRIPTION)(HANDLE, PCWSTR); +static PF_GET_THREAD_DESCRIPTION pGetThreadDescription = NULL; +static PF_SET_THREAD_DESCRIPTION pSetThreadDescription = NULL; +#endif + + /*[clinic input] module _thread [clinic start generated code]*/ @@ -1414,6 +1422,10 @@ local_new(PyTypeObject *type, PyObject *args, PyObject *kw) return NULL; } + // gh-128691: Use deferred reference counting for thread-locals to avoid + // contention on the shared object. 
+ _PyObject_SetDeferredRefcount((PyObject *)self); + self->args = Py_XNewRef(args); self->kw = Py_XNewRef(kw); @@ -1527,17 +1539,20 @@ create_localsdict(localobject *self, thread_module_state *state, goto err; } - if (PyDict_SetItem(self->localdicts, tstate->threading_local_key, ldict) < - 0) { + if (PyDict_SetItem(self->localdicts, tstate->threading_local_key, + ldict) < 0) + { goto err; } wr = create_sentinel_wr(self); if (wr == NULL) { PyObject *exc = PyErr_GetRaisedException(); - if (PyDict_DelItem(self->localdicts, tstate->threading_local_key) < - 0) { - PyErr_WriteUnraisable((PyObject *)self); + if (PyDict_DelItem(self->localdicts, + tstate->threading_local_key) < 0) + { + PyErr_FormatUnraisable("Exception ignored while deleting " + "thread local of %R", self); } PyErr_SetRaisedException(exc); goto err; @@ -1545,9 +1560,11 @@ create_localsdict(localobject *self, thread_module_state *state, if (PySet_Add(self->thread_watchdogs, wr) < 0) { PyObject *exc = PyErr_GetRaisedException(); - if (PyDict_DelItem(self->localdicts, tstate->threading_local_key) < - 0) { - PyErr_WriteUnraisable((PyObject *)self); + if (PyDict_DelItem(self->localdicts, + tstate->threading_local_key) < 0) + { + PyErr_FormatUnraisable("Exception ignored while deleting " + "thread local of %R", self); } PyErr_SetRaisedException(exc); goto err; @@ -1597,13 +1614,16 @@ _ldict(localobject *self, thread_module_state *state) we create a new one the next time we do an attr access */ PyObject *exc = PyErr_GetRaisedException(); - if (PyDict_DelItem(self->localdicts, tstate->threading_local_key) < - 0) { - PyErr_WriteUnraisable((PyObject *)self); - PyErr_Clear(); + if (PyDict_DelItem(self->localdicts, + tstate->threading_local_key) < 0) + { + PyErr_FormatUnraisable("Exception ignored while deleting " + "thread local of %R", self); + assert(!PyErr_Occurred()); } if (PySet_Discard(self->thread_watchdogs, wr) < 0) { - PyErr_WriteUnraisable((PyObject *)self); + PyErr_FormatUnraisable("Exception ignored while discarding " + "thread watchdog of %R", self); } PyErr_SetRaisedException(exc); Py_DECREF(ldict); @@ -1734,12 +1754,14 @@ clear_locals(PyObject *locals_and_key, PyObject *dummyweakref) if (self->localdicts != NULL) { PyObject *key = PyTuple_GetItem(locals_and_key, 1); if (PyDict_Pop(self->localdicts, key, NULL) < 0) { - PyErr_WriteUnraisable((PyObject*)self); + PyErr_FormatUnraisable("Exception ignored while clearing " + "thread local %R", (PyObject *)self); } } if (self->thread_watchdogs != NULL) { if (PySet_Discard(self->thread_watchdogs, dummyweakref) < 0) { - PyErr_WriteUnraisable((PyObject *)self); + PyErr_FormatUnraisable("Exception ignored while clearing " + "thread local %R", (PyObject *)self); } } @@ -2302,7 +2324,8 @@ thread_shutdown(PyObject *self, PyObject *args) // Wait for the thread to finish. If we're interrupted, such // as by a ctrl-c we print the error and exit early. if (ThreadHandle_join(handle, -1) < 0) { - PyErr_WriteUnraisable(NULL); + PyErr_FormatUnraisable("Exception ignored while joining a thread " + "in _thread._shutdown()"); ThreadHandle_decref(handle); Py_RETURN_NONE; } @@ -2364,7 +2387,7 @@ Internal only. 
Return a non-zero integer that uniquely identifies the main threa of the main interpreter."); -#ifdef HAVE_PTHREAD_GETNAME_NP +#if defined(HAVE_PTHREAD_GETNAME_NP) || defined(MS_WINDOWS) /*[clinic input] _thread._get_name @@ -2375,6 +2398,7 @@ static PyObject * _thread__get_name_impl(PyObject *module) /*[clinic end generated code: output=20026e7ee3da3dd7 input=35cec676833d04c8]*/ { +#ifndef MS_WINDOWS // Linux and macOS are limited to respectively 16 and 64 bytes char name[100]; pthread_t thread = pthread_self(); @@ -2389,11 +2413,26 @@ _thread__get_name_impl(PyObject *module) #else return PyUnicode_DecodeFSDefault(name); #endif +#else + // Windows implementation + assert(pGetThreadDescription != NULL); + + wchar_t *name; + HRESULT hr = pGetThreadDescription(GetCurrentThread(), &name); + if (FAILED(hr)) { + PyErr_SetFromWindowsErr(0); + return NULL; + } + + PyObject *name_obj = PyUnicode_FromWideChar(name, -1); + LocalFree(name); + return name_obj; +#endif } #endif // HAVE_PTHREAD_GETNAME_NP -#ifdef HAVE_PTHREAD_SETNAME_NP +#if defined(HAVE_PTHREAD_SETNAME_NP) || defined(MS_WINDOWS) /*[clinic input] _thread.set_name @@ -2406,6 +2445,7 @@ static PyObject * _thread_set_name_impl(PyObject *module, PyObject *name_obj) /*[clinic end generated code: output=402b0c68e0c0daed input=7e7acd98261be82f]*/ { +#ifndef MS_WINDOWS #ifdef __sun // Solaris always uses UTF-8 const char *encoding = "utf-8"; @@ -2421,12 +2461,12 @@ _thread_set_name_impl(PyObject *module, PyObject *name_obj) return NULL; } -#ifdef PYTHREAD_NAME_MAXLEN - // Truncate to PYTHREAD_NAME_MAXLEN bytes + the NUL byte if needed - if (PyBytes_GET_SIZE(name_encoded) > PYTHREAD_NAME_MAXLEN) { +#ifdef _PYTHREAD_NAME_MAXLEN + // Truncate to _PYTHREAD_NAME_MAXLEN bytes + the NUL byte if needed + if (PyBytes_GET_SIZE(name_encoded) > _PYTHREAD_NAME_MAXLEN) { PyObject *truncated; truncated = PyBytes_FromStringAndSize(PyBytes_AS_STRING(name_encoded), - PYTHREAD_NAME_MAXLEN); + _PYTHREAD_NAME_MAXLEN); if (truncated == NULL) { Py_DECREF(name_encoded); return NULL; @@ -2451,6 +2491,35 @@ _thread_set_name_impl(PyObject *module, PyObject *name_obj) return PyErr_SetFromErrno(PyExc_OSError); } Py_RETURN_NONE; +#else + // Windows implementation + assert(pSetThreadDescription != NULL); + + Py_ssize_t len; + wchar_t *name = PyUnicode_AsWideCharString(name_obj, &len); + if (name == NULL) { + return NULL; + } + + if (len > _PYTHREAD_NAME_MAXLEN) { + // Truncate the name + Py_UCS4 ch = name[_PYTHREAD_NAME_MAXLEN-1]; + if (Py_UNICODE_IS_HIGH_SURROGATE(ch)) { + name[_PYTHREAD_NAME_MAXLEN-1] = 0; + } + else { + name[_PYTHREAD_NAME_MAXLEN] = 0; + } + } + + HRESULT hr = pSetThreadDescription(GetCurrentThread(), name); + PyMem_Free(name); + if (FAILED(hr)) { + PyErr_SetFromWindowsErr((int)hr); + return NULL; + } + Py_RETURN_NONE; +#endif } #endif // HAVE_PTHREAD_SETNAME_NP @@ -2587,13 +2656,38 @@ thread_module_exec(PyObject *module) llist_init(&state->shutdown_handles); -#ifdef PYTHREAD_NAME_MAXLEN +#ifdef _PYTHREAD_NAME_MAXLEN if (PyModule_AddIntConstant(module, "_NAME_MAXLEN", - PYTHREAD_NAME_MAXLEN) < 0) { + _PYTHREAD_NAME_MAXLEN) < 0) { return -1; } #endif +#ifdef MS_WINDOWS + HMODULE kernelbase = GetModuleHandleW(L"kernelbase.dll"); + if (kernelbase != NULL) { + if (pGetThreadDescription == NULL) { + pGetThreadDescription = (PF_GET_THREAD_DESCRIPTION)GetProcAddress( + kernelbase, "GetThreadDescription"); + } + if (pSetThreadDescription == NULL) { + pSetThreadDescription = (PF_SET_THREAD_DESCRIPTION)GetProcAddress( + kernelbase, "SetThreadDescription"); + } + } 
+ + if (pGetThreadDescription == NULL) { + if (PyObject_DelAttrString(module, "_get_name") < 0) { + return -1; + } + } + if (pSetThreadDescription == NULL) { + if (PyObject_DelAttrString(module, "set_name") < 0) { + return -1; + } + } +#endif + return 0; } diff --git a/Modules/_tracemalloc.c b/Modules/_tracemalloc.c index 887a1e820e250e..be71fc9fc9c341 100644 --- a/Modules/_tracemalloc.c +++ b/Modules/_tracemalloc.c @@ -215,18 +215,14 @@ static struct PyModuleDef module_def = { PyMODINIT_FUNC PyInit__tracemalloc(void) { - PyObject *m; - m = PyModule_Create(&module_def); - if (m == NULL) + PyObject *mod = PyModule_Create(&module_def); + if (mod == NULL) { return NULL; + } + #ifdef Py_GIL_DISABLED - PyUnstable_Module_SetGIL(m, Py_MOD_GIL_NOT_USED); + PyUnstable_Module_SetGIL(mod, Py_MOD_GIL_NOT_USED); #endif - if (_PyTraceMalloc_Init() < 0) { - Py_DECREF(m); - return NULL; - } - - return m; + return mod; } diff --git a/Modules/_winapi.c b/Modules/_winapi.c index 260cab48091c16..786a828f00908c 100644 --- a/Modules/_winapi.c +++ b/Modules/_winapi.c @@ -171,17 +171,16 @@ overlapped_dealloc(OverlappedObject *self) { /* The operation is no longer pending -- nothing to do. */ } - else if (_Py_IsInterpreterFinalizing(_PyInterpreterState_GET())) - { + else if (_Py_IsInterpreterFinalizing(_PyInterpreterState_GET())) { /* The operation is still pending -- give a warning. This will probably only happen on Windows XP. */ PyErr_SetString(PyExc_PythonFinalizationError, "I/O operations still in flight while destroying " "Overlapped object, the process may crash"); - PyErr_WriteUnraisable(NULL); + PyErr_FormatUnraisable("Exception ignored while deallocating " + "overlapped operation %R", self); } - else - { + else { /* The operation is still pending, but the process is probably about to exit, so we need not worry too much about memory leaks. Leaking self prevents a potential diff --git a/Modules/_zoneinfo.c b/Modules/_zoneinfo.c index c5292575c22f23..1fcea9ce8b1261 100644 --- a/Modules/_zoneinfo.c +++ b/Modules/_zoneinfo.c @@ -782,7 +782,7 @@ zoneinfo_reduce(PyObject *obj_self, PyObject *unused) if (self->source == SOURCE_FILE) { // Objects constructed from files cannot be pickled. 
PyObject *pickle_error = - _PyImport_GetModuleAttrString("pickle", "PicklingError"); + PyImport_ImportModuleAttrString("pickle", "PicklingError"); if (pickle_error == NULL) { return NULL; } @@ -2554,7 +2554,7 @@ static PyObject * new_weak_cache(void) { PyObject *WeakValueDictionary = - _PyImport_GetModuleAttrString("weakref", "WeakValueDictionary"); + PyImport_ImportModuleAttrString("weakref", "WeakValueDictionary"); if (WeakValueDictionary == NULL) { return NULL; } @@ -2732,12 +2732,12 @@ zoneinfomodule_exec(PyObject *m) /* Populate imports */ state->_tzpath_find_tzfile = - _PyImport_GetModuleAttrString("zoneinfo._tzpath", "find_tzfile"); + PyImport_ImportModuleAttrString("zoneinfo._tzpath", "find_tzfile"); if (state->_tzpath_find_tzfile == NULL) { goto error; } - state->io_open = _PyImport_GetModuleAttrString("io", "open"); + state->io_open = PyImport_ImportModuleAttrString("io", "open"); if (state->io_open == NULL) { goto error; } diff --git a/Modules/arraymodule.c b/Modules/arraymodule.c index b80c964f20d65e..dc1729a7a3a558 100644 --- a/Modules/arraymodule.c +++ b/Modules/arraymodule.c @@ -1557,7 +1557,7 @@ array_array_fromfile_impl(arrayobject *self, PyTypeObject *cls, PyObject *f, not_enough_bytes = (PyBytes_GET_SIZE(b) != nbytes); - res = array_array_frombytes(self, b); + res = array_array_frombytes((PyObject *)self, b); Py_DECREF(b); if (res == NULL) return NULL; @@ -2285,7 +2285,7 @@ array_array___reduce_ex___impl(arrayobject *self, PyTypeObject *cls, assert(state != NULL); if (state->array_reconstructor == NULL) { - state->array_reconstructor = _PyImport_GetModuleAttrString( + state->array_reconstructor = PyImport_ImportModuleAttrString( "array", "_array_reconstructor"); if (state->array_reconstructor == NULL) { return NULL; @@ -2797,8 +2797,7 @@ array_new(PyTypeObject *type, PyObject *args, PyObject *kwds) else if (initial != NULL && (PyByteArray_Check(initial) || PyBytes_Check(initial))) { PyObject *v; - v = array_array_frombytes((arrayobject *)a, - initial); + v = array_array_frombytes((PyObject *)a, initial); if (v == NULL) { Py_DECREF(a); return NULL; @@ -3090,11 +3089,16 @@ array_arrayiterator___setstate__(arrayiterobject *self, PyObject *state) Py_ssize_t index = PyLong_AsSsize_t(state); if (index == -1 && PyErr_Occurred()) return NULL; - if (index < 0) - index = 0; - else if (index > Py_SIZE(self->ao)) - index = Py_SIZE(self->ao); /* iterator exhausted */ - self->index = index; + arrayobject *ao = self->ao; + if (ao != NULL) { + if (index < 0) { + index = 0; + } + else if (index > Py_SIZE(ao)) { + index = Py_SIZE(ao); /* iterator exhausted */ + } + self->index = index; + } Py_RETURN_NONE; } @@ -3202,7 +3206,7 @@ array_modexec(PyObject *m) return -1; } - PyObject *mutablesequence = _PyImport_GetModuleAttrString( + PyObject *mutablesequence = PyImport_ImportModuleAttrString( "collections.abc", "MutableSequence"); if (!mutablesequence) { Py_DECREF((PyObject *)state->ArrayType); diff --git a/Modules/atexitmodule.c b/Modules/atexitmodule.c index 1b89b32ba907d7..2bfdda53af8cb2 100644 --- a/Modules/atexitmodule.c +++ b/Modules/atexitmodule.c @@ -110,7 +110,8 @@ atexit_callfuncs(struct atexit_state *state) PyObject *copy = PyList_GetSlice(state->callbacks, 0, PyList_GET_SIZE(state->callbacks)); if (copy == NULL) { - PyErr_WriteUnraisable(NULL); + PyErr_FormatUnraisable("Exception ignored while " + "copying atexit callbacks"); return; } diff --git a/Modules/blake2module.c b/Modules/blake2module.c index 6723e7de4675a5..016c834c01bbe2 100644 --- a/Modules/blake2module.c +++ 
b/Modules/blake2module.c @@ -366,6 +366,8 @@ typedef struct { PyMutex mutex; } Blake2Object; +#define _Blake2Object_CAST(op) ((Blake2Object *)(op)) + #include "clinic/blake2module.c.h" /*[clinic input] @@ -849,24 +851,27 @@ static PyMethodDef py_blake2b_methods[] = { static PyObject * -py_blake2b_get_name(Blake2Object *self, void *closure) +py_blake2b_get_name(PyObject *op, void *Py_UNUSED(closure)) { + Blake2Object *self = _Blake2Object_CAST(op); return PyUnicode_FromString(is_blake2b(self->impl) ? "blake2b" : "blake2s"); } static PyObject * -py_blake2b_get_block_size(Blake2Object *self, void *closure) +py_blake2b_get_block_size(PyObject *op, void *Py_UNUSED(closure)) { + Blake2Object *self = _Blake2Object_CAST(op); return PyLong_FromLong(is_blake2b(self->impl) ? HACL_HASH_BLAKE2B_BLOCK_BYTES : HACL_HASH_BLAKE2S_BLOCK_BYTES); } static PyObject * -py_blake2b_get_digest_size(Blake2Object *self, void *closure) +py_blake2b_get_digest_size(PyObject *op, void *Py_UNUSED(closure)) { + Blake2Object *self = _Blake2Object_CAST(op); switch (self->impl) { #if HACL_CAN_COMPILE_SIMD256 case Blake2b_256: @@ -887,15 +892,13 @@ py_blake2b_get_digest_size(Blake2Object *self, void *closure) static PyGetSetDef py_blake2b_getsetters[] = { - {"name", (getter)py_blake2b_get_name, - NULL, NULL, NULL}, - {"block_size", (getter)py_blake2b_get_block_size, - NULL, NULL, NULL}, - {"digest_size", (getter)py_blake2b_get_digest_size, - NULL, NULL, NULL}, - {NULL} + {"name", py_blake2b_get_name, NULL, NULL, NULL}, + {"block_size", py_blake2b_get_block_size, NULL, NULL, NULL}, + {"digest_size", py_blake2b_get_digest_size, NULL, NULL, NULL}, + {NULL} /* Sentinel */ }; + static int py_blake2_clear(PyObject *op) { diff --git a/Modules/cjkcodecs/cjkcodecs.h b/Modules/cjkcodecs/cjkcodecs.h index 2b446ba5226ac0..737a7a042753a9 100644 --- a/Modules/cjkcodecs/cjkcodecs.h +++ b/Modules/cjkcodecs/cjkcodecs.h @@ -13,7 +13,6 @@ #include "Python.h" #include "multibytecodec.h" -#include "pycore_import.h" // _PyImport_GetModuleAttrString() /* a unicode "undefined" code point */ @@ -299,7 +298,7 @@ add_codecs(cjkcodecs_module_state *st) \ static PyObject * getmultibytecodec(void) { - return _PyImport_GetModuleAttrString("_multibytecodec", "__create_codec"); + return PyImport_ImportModuleAttrString("_multibytecodec", "__create_codec"); } static void diff --git a/Modules/cjkcodecs/clinic/multibytecodec.c.h b/Modules/cjkcodecs/clinic/multibytecodec.c.h index 7e7ea9e0fdfa8e..d77bbd48066354 100644 --- a/Modules/cjkcodecs/clinic/multibytecodec.c.h +++ b/Modules/cjkcodecs/clinic/multibytecodec.c.h @@ -28,7 +28,7 @@ _multibytecodec_MultibyteCodec_encode_impl(MultibyteCodecObject *self, const char *errors); static PyObject * -_multibytecodec_MultibyteCodec_encode(MultibyteCodecObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_multibytecodec_MultibyteCodec_encode(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -89,7 +89,7 @@ _multibytecodec_MultibyteCodec_encode(MultibyteCodecObject *self, PyObject *cons goto exit; } skip_optional_pos: - return_value = _multibytecodec_MultibyteCodec_encode_impl(self, input, errors); + return_value = _multibytecodec_MultibyteCodec_encode_impl((MultibyteCodecObject *)self, input, errors); exit: return return_value; @@ -115,7 +115,7 @@ _multibytecodec_MultibyteCodec_decode_impl(MultibyteCodecObject *self, const char *errors); static PyObject * 
-_multibytecodec_MultibyteCodec_decode(MultibyteCodecObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_multibytecodec_MultibyteCodec_decode(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -178,7 +178,7 @@ _multibytecodec_MultibyteCodec_decode(MultibyteCodecObject *self, PyObject *cons goto exit; } skip_optional_pos: - return_value = _multibytecodec_MultibyteCodec_decode_impl(self, &input, errors); + return_value = _multibytecodec_MultibyteCodec_decode_impl((MultibyteCodecObject *)self, &input, errors); exit: /* Cleanup for input */ @@ -203,7 +203,7 @@ _multibytecodec_MultibyteIncrementalEncoder_encode_impl(MultibyteIncrementalEnco int final); static PyObject * -_multibytecodec_MultibyteIncrementalEncoder_encode(MultibyteIncrementalEncoderObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_multibytecodec_MultibyteIncrementalEncoder_encode(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -250,7 +250,7 @@ _multibytecodec_MultibyteIncrementalEncoder_encode(MultibyteIncrementalEncoderOb goto exit; } skip_optional_pos: - return_value = _multibytecodec_MultibyteIncrementalEncoder_encode_impl(self, input, final); + return_value = _multibytecodec_MultibyteIncrementalEncoder_encode_impl((MultibyteIncrementalEncoderObject *)self, input, final); exit: return return_value; @@ -268,9 +268,9 @@ static PyObject * _multibytecodec_MultibyteIncrementalEncoder_getstate_impl(MultibyteIncrementalEncoderObject *self); static PyObject * -_multibytecodec_MultibyteIncrementalEncoder_getstate(MultibyteIncrementalEncoderObject *self, PyObject *Py_UNUSED(ignored)) +_multibytecodec_MultibyteIncrementalEncoder_getstate(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _multibytecodec_MultibyteIncrementalEncoder_getstate_impl(self); + return _multibytecodec_MultibyteIncrementalEncoder_getstate_impl((MultibyteIncrementalEncoderObject *)self); } PyDoc_STRVAR(_multibytecodec_MultibyteIncrementalEncoder_setstate__doc__, @@ -286,7 +286,7 @@ _multibytecodec_MultibyteIncrementalEncoder_setstate_impl(MultibyteIncrementalEn PyLongObject *statelong); static PyObject * -_multibytecodec_MultibyteIncrementalEncoder_setstate(MultibyteIncrementalEncoderObject *self, PyObject *arg) +_multibytecodec_MultibyteIncrementalEncoder_setstate(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; PyLongObject *statelong; @@ -296,7 +296,7 @@ _multibytecodec_MultibyteIncrementalEncoder_setstate(MultibyteIncrementalEncoder goto exit; } statelong = (PyLongObject *)arg; - return_value = _multibytecodec_MultibyteIncrementalEncoder_setstate_impl(self, statelong); + return_value = _multibytecodec_MultibyteIncrementalEncoder_setstate_impl((MultibyteIncrementalEncoderObject *)self, statelong); exit: return return_value; @@ -314,9 +314,9 @@ static PyObject * _multibytecodec_MultibyteIncrementalEncoder_reset_impl(MultibyteIncrementalEncoderObject *self); static PyObject * -_multibytecodec_MultibyteIncrementalEncoder_reset(MultibyteIncrementalEncoderObject *self, PyObject *Py_UNUSED(ignored)) +_multibytecodec_MultibyteIncrementalEncoder_reset(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _multibytecodec_MultibyteIncrementalEncoder_reset_impl(self); + return 
_multibytecodec_MultibyteIncrementalEncoder_reset_impl((MultibyteIncrementalEncoderObject *)self); } PyDoc_STRVAR(_multibytecodec_MultibyteIncrementalDecoder_decode__doc__, @@ -333,7 +333,7 @@ _multibytecodec_MultibyteIncrementalDecoder_decode_impl(MultibyteIncrementalDeco int final); static PyObject * -_multibytecodec_MultibyteIncrementalDecoder_decode(MultibyteIncrementalDecoderObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_multibytecodec_MultibyteIncrementalDecoder_decode(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -382,7 +382,7 @@ _multibytecodec_MultibyteIncrementalDecoder_decode(MultibyteIncrementalDecoderOb goto exit; } skip_optional_pos: - return_value = _multibytecodec_MultibyteIncrementalDecoder_decode_impl(self, &input, final); + return_value = _multibytecodec_MultibyteIncrementalDecoder_decode_impl((MultibyteIncrementalDecoderObject *)self, &input, final); exit: /* Cleanup for input */ @@ -405,9 +405,9 @@ static PyObject * _multibytecodec_MultibyteIncrementalDecoder_getstate_impl(MultibyteIncrementalDecoderObject *self); static PyObject * -_multibytecodec_MultibyteIncrementalDecoder_getstate(MultibyteIncrementalDecoderObject *self, PyObject *Py_UNUSED(ignored)) +_multibytecodec_MultibyteIncrementalDecoder_getstate(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _multibytecodec_MultibyteIncrementalDecoder_getstate_impl(self); + return _multibytecodec_MultibyteIncrementalDecoder_getstate_impl((MultibyteIncrementalDecoderObject *)self); } PyDoc_STRVAR(_multibytecodec_MultibyteIncrementalDecoder_setstate__doc__, @@ -423,7 +423,7 @@ _multibytecodec_MultibyteIncrementalDecoder_setstate_impl(MultibyteIncrementalDe PyObject *state); static PyObject * -_multibytecodec_MultibyteIncrementalDecoder_setstate(MultibyteIncrementalDecoderObject *self, PyObject *arg) +_multibytecodec_MultibyteIncrementalDecoder_setstate(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; PyObject *state; @@ -433,7 +433,7 @@ _multibytecodec_MultibyteIncrementalDecoder_setstate(MultibyteIncrementalDecoder goto exit; } state = arg; - return_value = _multibytecodec_MultibyteIncrementalDecoder_setstate_impl(self, state); + return_value = _multibytecodec_MultibyteIncrementalDecoder_setstate_impl((MultibyteIncrementalDecoderObject *)self, state); exit: return return_value; @@ -451,9 +451,9 @@ static PyObject * _multibytecodec_MultibyteIncrementalDecoder_reset_impl(MultibyteIncrementalDecoderObject *self); static PyObject * -_multibytecodec_MultibyteIncrementalDecoder_reset(MultibyteIncrementalDecoderObject *self, PyObject *Py_UNUSED(ignored)) +_multibytecodec_MultibyteIncrementalDecoder_reset(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _multibytecodec_MultibyteIncrementalDecoder_reset_impl(self); + return _multibytecodec_MultibyteIncrementalDecoder_reset_impl((MultibyteIncrementalDecoderObject *)self); } PyDoc_STRVAR(_multibytecodec_MultibyteStreamReader_read__doc__, @@ -469,7 +469,7 @@ _multibytecodec_MultibyteStreamReader_read_impl(MultibyteStreamReaderObject *sel PyObject *sizeobj); static PyObject * -_multibytecodec_MultibyteStreamReader_read(MultibyteStreamReaderObject *self, PyObject *const *args, Py_ssize_t nargs) +_multibytecodec_MultibyteStreamReader_read(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *sizeobj = Py_None; @@ -482,7 +482,7 @@ 
_multibytecodec_MultibyteStreamReader_read(MultibyteStreamReaderObject *self, Py } sizeobj = args[0]; skip_optional: - return_value = _multibytecodec_MultibyteStreamReader_read_impl(self, sizeobj); + return_value = _multibytecodec_MultibyteStreamReader_read_impl((MultibyteStreamReaderObject *)self, sizeobj); exit: return return_value; @@ -501,7 +501,7 @@ _multibytecodec_MultibyteStreamReader_readline_impl(MultibyteStreamReaderObject PyObject *sizeobj); static PyObject * -_multibytecodec_MultibyteStreamReader_readline(MultibyteStreamReaderObject *self, PyObject *const *args, Py_ssize_t nargs) +_multibytecodec_MultibyteStreamReader_readline(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *sizeobj = Py_None; @@ -514,7 +514,7 @@ _multibytecodec_MultibyteStreamReader_readline(MultibyteStreamReaderObject *self } sizeobj = args[0]; skip_optional: - return_value = _multibytecodec_MultibyteStreamReader_readline_impl(self, sizeobj); + return_value = _multibytecodec_MultibyteStreamReader_readline_impl((MultibyteStreamReaderObject *)self, sizeobj); exit: return return_value; @@ -533,7 +533,7 @@ _multibytecodec_MultibyteStreamReader_readlines_impl(MultibyteStreamReaderObject PyObject *sizehintobj); static PyObject * -_multibytecodec_MultibyteStreamReader_readlines(MultibyteStreamReaderObject *self, PyObject *const *args, Py_ssize_t nargs) +_multibytecodec_MultibyteStreamReader_readlines(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *sizehintobj = Py_None; @@ -546,7 +546,7 @@ _multibytecodec_MultibyteStreamReader_readlines(MultibyteStreamReaderObject *sel } sizehintobj = args[0]; skip_optional: - return_value = _multibytecodec_MultibyteStreamReader_readlines_impl(self, sizehintobj); + return_value = _multibytecodec_MultibyteStreamReader_readlines_impl((MultibyteStreamReaderObject *)self, sizehintobj); exit: return return_value; @@ -564,9 +564,9 @@ static PyObject * _multibytecodec_MultibyteStreamReader_reset_impl(MultibyteStreamReaderObject *self); static PyObject * -_multibytecodec_MultibyteStreamReader_reset(MultibyteStreamReaderObject *self, PyObject *Py_UNUSED(ignored)) +_multibytecodec_MultibyteStreamReader_reset(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _multibytecodec_MultibyteStreamReader_reset_impl(self); + return _multibytecodec_MultibyteStreamReader_reset_impl((MultibyteStreamReaderObject *)self); } PyDoc_STRVAR(_multibytecodec_MultibyteStreamWriter_write__doc__, @@ -583,7 +583,7 @@ _multibytecodec_MultibyteStreamWriter_write_impl(MultibyteStreamWriterObject *se PyObject *strobj); static PyObject * -_multibytecodec_MultibyteStreamWriter_write(MultibyteStreamWriterObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_multibytecodec_MultibyteStreamWriter_write(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -608,7 +608,7 @@ _multibytecodec_MultibyteStreamWriter_write(MultibyteStreamWriterObject *self, P goto exit; } strobj = args[0]; - return_value = _multibytecodec_MultibyteStreamWriter_write_impl(self, cls, strobj); + return_value = _multibytecodec_MultibyteStreamWriter_write_impl((MultibyteStreamWriterObject *)self, cls, strobj); exit: return return_value; @@ -628,7 +628,7 @@ _multibytecodec_MultibyteStreamWriter_writelines_impl(MultibyteStreamWriterObjec PyObject *lines); static PyObject * 
-_multibytecodec_MultibyteStreamWriter_writelines(MultibyteStreamWriterObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_multibytecodec_MultibyteStreamWriter_writelines(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -653,7 +653,7 @@ _multibytecodec_MultibyteStreamWriter_writelines(MultibyteStreamWriterObject *se goto exit; } lines = args[0]; - return_value = _multibytecodec_MultibyteStreamWriter_writelines_impl(self, cls, lines); + return_value = _multibytecodec_MultibyteStreamWriter_writelines_impl((MultibyteStreamWriterObject *)self, cls, lines); exit: return return_value; @@ -672,13 +672,13 @@ _multibytecodec_MultibyteStreamWriter_reset_impl(MultibyteStreamWriterObject *se PyTypeObject *cls); static PyObject * -_multibytecodec_MultibyteStreamWriter_reset(MultibyteStreamWriterObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_multibytecodec_MultibyteStreamWriter_reset(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "reset() takes no arguments"); return NULL; } - return _multibytecodec_MultibyteStreamWriter_reset_impl(self, cls); + return _multibytecodec_MultibyteStreamWriter_reset_impl((MultibyteStreamWriterObject *)self, cls); } PyDoc_STRVAR(_multibytecodec___create_codec__doc__, @@ -688,4 +688,4 @@ PyDoc_STRVAR(_multibytecodec___create_codec__doc__, #define _MULTIBYTECODEC___CREATE_CODEC_METHODDEF \ {"__create_codec", (PyCFunction)_multibytecodec___create_codec, METH_O, _multibytecodec___create_codec__doc__}, -/*[clinic end generated code: output=60e1fa3a7615c148 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=6571941b8e45b013 input=a9049054013a1b77]*/ diff --git a/Modules/cjkcodecs/multibytecodec.c b/Modules/cjkcodecs/multibytecodec.c index 53135ae4aa7968..08b74740bda4bf 100644 --- a/Modules/cjkcodecs/multibytecodec.c +++ b/Modules/cjkcodecs/multibytecodec.c @@ -56,6 +56,27 @@ class _multibytecodec.MultibyteStreamWriter "MultibyteStreamWriterObject *" "cli /*[clinic end generated code: output=da39a3ee5e6b4b0d input=305a76dfdd24b99c]*/ #undef clinic_get_state +#define _MultibyteCodec_CAST(op) ((MultibyteCodec *)(op)) +#define _MultibyteCodecObject_CAST(op) ((MultibyteCodecObject *)(op)) + +#define _MultibyteStatefulCodecContext_CAST(op) \ + ((MultibyteStatefulCodecContext *)(op)) + +#define _MultibyteStatefulEncoderContext_CAST(op) \ + ((MultibyteStatefulEncoderContext *)(op)) +#define _MultibyteStatefulDecoderContext_CAST(op) \ + ((MultibyteStatefulDecoderContext *)(op)) + +#define _MultibyteIncrementalEncoderObject_CAST(op) \ + ((MultibyteIncrementalEncoderObject *)(op)) +#define _MultibyteIncrementalDecoderObject_CAST(op) \ + ((MultibyteIncrementalDecoderObject *)(op)) + +#define _MultibyteStreamReaderObject_CAST(op) \ + ((MultibyteStreamReaderObject *)(op)) +#define _MultibyteStreamWriterObject_CAST(op) \ + ((MultibyteStreamWriterObject *)(op)) + typedef struct { PyObject *inobj; Py_ssize_t inpos, inlen; @@ -136,9 +157,10 @@ call_error_callback(PyObject *errors, PyObject *exc) } static PyObject * -codecctx_errors_get(MultibyteStatefulCodecContext *self, void *Py_UNUSED(ignored)) +codecctx_errors_get(PyObject *op, void *Py_UNUSED(closure)) { const char *errors; + MultibyteStatefulCodecContext *self = 
_MultibyteStatefulCodecContext_CAST(op); if (self->errors == ERROR_STRICT) errors = "strict"; @@ -154,11 +176,11 @@ codecctx_errors_get(MultibyteStatefulCodecContext *self, void *Py_UNUSED(ignored } static int -codecctx_errors_set(MultibyteStatefulCodecContext *self, PyObject *value, - void *closure) +codecctx_errors_set(PyObject *op, PyObject *value, void *Py_UNUSED(closure)) { PyObject *cb; const char *str; + MultibyteStatefulCodecContext *self = _MultibyteStatefulCodecContext_CAST(op); if (value == NULL) { PyErr_SetString(PyExc_AttributeError, "cannot delete attribute"); @@ -184,9 +206,8 @@ codecctx_errors_set(MultibyteStatefulCodecContext *self, PyObject *value, /* This getset handlers list is used by all the stateful codec objects */ static PyGetSetDef codecctx_getsets[] = { - {"errors", (getter)codecctx_errors_get, - (setter)codecctx_errors_set, - PyDoc_STR("how to treat errors")}, + {"errors", codecctx_errors_get, codecctx_errors_set, + PyDoc_STR("how to treat errors")}, {NULL,} }; @@ -719,22 +740,24 @@ static struct PyMethodDef multibytecodec_methods[] = { }; static int -multibytecodec_clear(MultibyteCodecObject *self) +multibytecodec_clear(PyObject *op) { + MultibyteCodecObject *self = _MultibyteCodecObject_CAST(op); Py_CLEAR(self->cjk_module); return 0; } static int -multibytecodec_traverse(MultibyteCodecObject *self, visitproc visit, void *arg) +multibytecodec_traverse(PyObject *op, visitproc visit, void *arg) { + MultibyteCodecObject *self = _MultibyteCodecObject_CAST(op); Py_VISIT(Py_TYPE(self)); Py_VISIT(self->cjk_module); return 0; } static void -multibytecodec_dealloc(MultibyteCodecObject *self) +multibytecodec_dealloc(PyObject *self) { PyObject_GC_UnTrack(self); PyTypeObject *tp = Py_TYPE(self); @@ -1106,17 +1129,18 @@ mbiencoder_init(PyObject *self, PyObject *args, PyObject *kwds) } static int -mbiencoder_traverse(MultibyteIncrementalEncoderObject *self, - visitproc visit, void *arg) +mbiencoder_traverse(PyObject *op, visitproc visit, void *arg) { + MultibyteIncrementalEncoderObject *self = _MultibyteIncrementalEncoderObject_CAST(op); if (ERROR_ISCUSTOM(self->errors)) Py_VISIT(self->errors); return 0; } static void -mbiencoder_dealloc(MultibyteIncrementalEncoderObject *self) +mbiencoder_dealloc(PyObject *op) { + MultibyteIncrementalEncoderObject *self = _MultibyteIncrementalEncoderObject_CAST(op); PyTypeObject *tp = Py_TYPE(self); PyObject_GC_UnTrack(self); ERROR_DECREF(self->errors); @@ -1388,17 +1412,18 @@ mbidecoder_init(PyObject *self, PyObject *args, PyObject *kwds) } static int -mbidecoder_traverse(MultibyteIncrementalDecoderObject *self, - visitproc visit, void *arg) +mbidecoder_traverse(PyObject *op, visitproc visit, void *arg) { + MultibyteIncrementalDecoderObject *self = _MultibyteIncrementalDecoderObject_CAST(op); if (ERROR_ISCUSTOM(self->errors)) Py_VISIT(self->errors); return 0; } static void -mbidecoder_dealloc(MultibyteIncrementalDecoderObject *self) +mbidecoder_dealloc(PyObject *op) { + MultibyteIncrementalDecoderObject *self = _MultibyteIncrementalDecoderObject_CAST(op); PyTypeObject *tp = Py_TYPE(self); PyObject_GC_UnTrack(self); ERROR_DECREF(self->errors); @@ -1704,9 +1729,9 @@ mbstreamreader_init(PyObject *self, PyObject *args, PyObject *kwds) } static int -mbstreamreader_traverse(MultibyteStreamReaderObject *self, - visitproc visit, void *arg) +mbstreamreader_traverse(PyObject *op, visitproc visit, void *arg) { + MultibyteStreamReaderObject *self = _MultibyteStreamReaderObject_CAST(op); if (ERROR_ISCUSTOM(self->errors)) Py_VISIT(self->errors); 
Py_VISIT(self->stream); @@ -1714,8 +1739,9 @@ mbstreamreader_traverse(MultibyteStreamReaderObject *self, } static void -mbstreamreader_dealloc(MultibyteStreamReaderObject *self) +mbstreamreader_dealloc(PyObject *op) { + MultibyteStreamReaderObject *self = _MultibyteStreamReaderObject_CAST(op); PyTypeObject *tp = Py_TYPE(self); PyObject_GC_UnTrack(self); ERROR_DECREF(self->errors); @@ -1927,9 +1953,9 @@ mbstreamwriter_init(PyObject *self, PyObject *args, PyObject *kwds) } static int -mbstreamwriter_traverse(MultibyteStreamWriterObject *self, - visitproc visit, void *arg) +mbstreamwriter_traverse(PyObject *op, visitproc visit, void *arg) { + MultibyteStreamWriterObject *self = _MultibyteStreamWriterObject_CAST(op); if (ERROR_ISCUSTOM(self->errors)) Py_VISIT(self->errors); Py_VISIT(self->stream); @@ -1937,8 +1963,9 @@ mbstreamwriter_traverse(MultibyteStreamWriterObject *self, } static void -mbstreamwriter_dealloc(MultibyteStreamWriterObject *self) +mbstreamwriter_dealloc(PyObject *op) { + MultibyteStreamWriterObject *self = _MultibyteStreamWriterObject_CAST(op); PyTypeObject *tp = Py_TYPE(self); PyObject_GC_UnTrack(self); ERROR_DECREF(self->errors); @@ -2044,7 +2071,7 @@ _multibytecodec_clear(PyObject *mod) static void _multibytecodec_free(void *mod) { - _multibytecodec_clear((PyObject *)mod); + (void)_multibytecodec_clear((PyObject *)mod); } #define CREATE_TYPE(module, type, spec) \ diff --git a/Modules/clinic/_asynciomodule.c.h b/Modules/clinic/_asynciomodule.c.h index 3a37cdd9b5fa83..c6b7e39788be71 100644 --- a/Modules/clinic/_asynciomodule.c.h +++ b/Modules/clinic/_asynciomodule.c.h @@ -9,6 +9,31 @@ preserve #include "pycore_critical_section.h"// Py_BEGIN_CRITICAL_SECTION() #include "pycore_modsupport.h" // _PyArg_UnpackKeywords() +#if !defined(_asyncio_Future__asyncio_awaited_by_DOCSTR) +# define _asyncio_Future__asyncio_awaited_by_DOCSTR NULL +#endif +#if defined(_ASYNCIO_FUTURE__ASYNCIO_AWAITED_BY_GETSETDEF) +# undef _ASYNCIO_FUTURE__ASYNCIO_AWAITED_BY_GETSETDEF +# define _ASYNCIO_FUTURE__ASYNCIO_AWAITED_BY_GETSETDEF {"_asyncio_awaited_by", (getter)_asyncio_Future__asyncio_awaited_by_get, (setter)_asyncio_Future__asyncio_awaited_by_set, _asyncio_Future__asyncio_awaited_by_DOCSTR}, +#else +# define _ASYNCIO_FUTURE__ASYNCIO_AWAITED_BY_GETSETDEF {"_asyncio_awaited_by", (getter)_asyncio_Future__asyncio_awaited_by_get, NULL, _asyncio_Future__asyncio_awaited_by_DOCSTR}, +#endif + +static PyObject * +_asyncio_Future__asyncio_awaited_by_get_impl(FutureObj *self); + +static PyObject * +_asyncio_Future__asyncio_awaited_by_get(PyObject *self, void *Py_UNUSED(context)) +{ + PyObject *return_value = NULL; + + Py_BEGIN_CRITICAL_SECTION(self); + return_value = _asyncio_Future__asyncio_awaited_by_get_impl((FutureObj *)self); + Py_END_CRITICAL_SECTION(); + + return return_value; +} + PyDoc_STRVAR(_asyncio_Future___init____doc__, "Future(*, loop=None)\n" "--\n" @@ -97,12 +122,12 @@ static PyObject * _asyncio_Future_result_impl(FutureObj *self); static PyObject * -_asyncio_Future_result(FutureObj *self, PyObject *Py_UNUSED(ignored)) +_asyncio_Future_result(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Future_result_impl(self); + return_value = _asyncio_Future_result_impl((FutureObj *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -126,7 +151,7 @@ static PyObject * _asyncio_Future_exception_impl(FutureObj *self, PyTypeObject *cls); static PyObject * -_asyncio_Future_exception(FutureObj *self, 
PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_asyncio_Future_exception(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; @@ -135,7 +160,7 @@ _asyncio_Future_exception(FutureObj *self, PyTypeObject *cls, PyObject *const *a goto exit; } Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Future_exception_impl(self, cls); + return_value = _asyncio_Future_exception_impl((FutureObj *)self, cls); Py_END_CRITICAL_SECTION(); exit: @@ -159,7 +184,7 @@ _asyncio_Future_set_result_impl(FutureObj *self, PyTypeObject *cls, PyObject *result); static PyObject * -_asyncio_Future_set_result(FutureObj *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_asyncio_Future_set_result(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -185,7 +210,7 @@ _asyncio_Future_set_result(FutureObj *self, PyTypeObject *cls, PyObject *const * } result = args[0]; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Future_set_result_impl(self, cls, result); + return_value = _asyncio_Future_set_result_impl((FutureObj *)self, cls, result); Py_END_CRITICAL_SECTION(); exit: @@ -209,7 +234,7 @@ _asyncio_Future_set_exception_impl(FutureObj *self, PyTypeObject *cls, PyObject *exception); static PyObject * -_asyncio_Future_set_exception(FutureObj *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_asyncio_Future_set_exception(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -235,7 +260,7 @@ _asyncio_Future_set_exception(FutureObj *self, PyTypeObject *cls, PyObject *cons } exception = args[0]; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Future_set_exception_impl(self, cls, exception); + return_value = _asyncio_Future_set_exception_impl((FutureObj *)self, cls, exception); Py_END_CRITICAL_SECTION(); exit: @@ -260,7 +285,7 @@ _asyncio_Future_add_done_callback_impl(FutureObj *self, PyTypeObject *cls, PyObject *fn, PyObject *context); static PyObject * -_asyncio_Future_add_done_callback(FutureObj *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_asyncio_Future_add_done_callback(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -305,7 +330,7 @@ _asyncio_Future_add_done_callback(FutureObj *self, PyTypeObject *cls, PyObject * context = args[1]; skip_optional_kwonly: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Future_add_done_callback_impl(self, cls, fn, context); + return_value = _asyncio_Future_add_done_callback_impl((FutureObj *)self, cls, fn, context); Py_END_CRITICAL_SECTION(); exit: @@ -328,7 +353,7 @@ _asyncio_Future_remove_done_callback_impl(FutureObj *self, PyTypeObject *cls, PyObject *fn); static PyObject * -_asyncio_Future_remove_done_callback(FutureObj *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_asyncio_Future_remove_done_callback(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && 
!defined(Py_BUILD_CORE_MODULE) @@ -354,7 +379,7 @@ _asyncio_Future_remove_done_callback(FutureObj *self, PyTypeObject *cls, PyObjec } fn = args[0]; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Future_remove_done_callback_impl(self, cls, fn); + return_value = _asyncio_Future_remove_done_callback_impl((FutureObj *)self, cls, fn); Py_END_CRITICAL_SECTION(); exit: @@ -379,7 +404,7 @@ _asyncio_Future_cancel_impl(FutureObj *self, PyTypeObject *cls, PyObject *msg); static PyObject * -_asyncio_Future_cancel(FutureObj *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_asyncio_Future_cancel(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -422,7 +447,7 @@ _asyncio_Future_cancel(FutureObj *self, PyTypeObject *cls, PyObject *const *args msg = args[0]; skip_optional_pos: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Future_cancel_impl(self, cls, msg); + return_value = _asyncio_Future_cancel_impl((FutureObj *)self, cls, msg); Py_END_CRITICAL_SECTION(); exit: @@ -442,12 +467,12 @@ static PyObject * _asyncio_Future_cancelled_impl(FutureObj *self); static PyObject * -_asyncio_Future_cancelled(FutureObj *self, PyObject *Py_UNUSED(ignored)) +_asyncio_Future_cancelled(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Future_cancelled_impl(self); + return_value = _asyncio_Future_cancelled_impl((FutureObj *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -469,12 +494,12 @@ static PyObject * _asyncio_Future_done_impl(FutureObj *self); static PyObject * -_asyncio_Future_done(FutureObj *self, PyObject *Py_UNUSED(ignored)) +_asyncio_Future_done(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Future_done_impl(self); + return_value = _asyncio_Future_done_impl((FutureObj *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -493,7 +518,7 @@ static PyObject * _asyncio_Future_get_loop_impl(FutureObj *self, PyTypeObject *cls); static PyObject * -_asyncio_Future_get_loop(FutureObj *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_asyncio_Future_get_loop(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; @@ -502,7 +527,7 @@ _asyncio_Future_get_loop(FutureObj *self, PyTypeObject *cls, PyObject *const *ar goto exit; } Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Future_get_loop_impl(self, cls); + return_value = _asyncio_Future_get_loop_impl((FutureObj *)self, cls); Py_END_CRITICAL_SECTION(); exit: @@ -523,12 +548,12 @@ static PyObject * _asyncio_Future__asyncio_future_blocking_get_impl(FutureObj *self); static PyObject * -_asyncio_Future__asyncio_future_blocking_get(FutureObj *self, void *Py_UNUSED(context)) +_asyncio_Future__asyncio_future_blocking_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Future__asyncio_future_blocking_get_impl(self); + return_value = _asyncio_Future__asyncio_future_blocking_get_impl((FutureObj *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -549,12 +574,12 @@ _asyncio_Future__asyncio_future_blocking_set_impl(FutureObj *self, PyObject *value); static int 
-_asyncio_Future__asyncio_future_blocking_set(FutureObj *self, PyObject *value, void *Py_UNUSED(context)) +_asyncio_Future__asyncio_future_blocking_set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) { int return_value; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Future__asyncio_future_blocking_set_impl(self, value); + return_value = _asyncio_Future__asyncio_future_blocking_set_impl((FutureObj *)self, value); Py_END_CRITICAL_SECTION(); return return_value; @@ -574,12 +599,12 @@ static PyObject * _asyncio_Future__log_traceback_get_impl(FutureObj *self); static PyObject * -_asyncio_Future__log_traceback_get(FutureObj *self, void *Py_UNUSED(context)) +_asyncio_Future__log_traceback_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Future__log_traceback_get_impl(self); + return_value = _asyncio_Future__log_traceback_get_impl((FutureObj *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -599,12 +624,12 @@ static int _asyncio_Future__log_traceback_set_impl(FutureObj *self, PyObject *value); static int -_asyncio_Future__log_traceback_set(FutureObj *self, PyObject *value, void *Py_UNUSED(context)) +_asyncio_Future__log_traceback_set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) { int return_value; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Future__log_traceback_set_impl(self, value); + return_value = _asyncio_Future__log_traceback_set_impl((FutureObj *)self, value); Py_END_CRITICAL_SECTION(); return return_value; @@ -624,12 +649,12 @@ static PyObject * _asyncio_Future__loop_get_impl(FutureObj *self); static PyObject * -_asyncio_Future__loop_get(FutureObj *self, void *Py_UNUSED(context)) +_asyncio_Future__loop_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Future__loop_get_impl(self); + return_value = _asyncio_Future__loop_get_impl((FutureObj *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -649,12 +674,12 @@ static PyObject * _asyncio_Future__callbacks_get_impl(FutureObj *self); static PyObject * -_asyncio_Future__callbacks_get(FutureObj *self, void *Py_UNUSED(context)) +_asyncio_Future__callbacks_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Future__callbacks_get_impl(self); + return_value = _asyncio_Future__callbacks_get_impl((FutureObj *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -674,12 +699,12 @@ static PyObject * _asyncio_Future__result_get_impl(FutureObj *self); static PyObject * -_asyncio_Future__result_get(FutureObj *self, void *Py_UNUSED(context)) +_asyncio_Future__result_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Future__result_get_impl(self); + return_value = _asyncio_Future__result_get_impl((FutureObj *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -699,12 +724,12 @@ static PyObject * _asyncio_Future__exception_get_impl(FutureObj *self); static PyObject * -_asyncio_Future__exception_get(FutureObj *self, void *Py_UNUSED(context)) +_asyncio_Future__exception_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Future__exception_get_impl(self); + return_value = _asyncio_Future__exception_get_impl((FutureObj *)self); Py_END_CRITICAL_SECTION(); 
return return_value; @@ -724,12 +749,12 @@ static PyObject * _asyncio_Future__source_traceback_get_impl(FutureObj *self); static PyObject * -_asyncio_Future__source_traceback_get(FutureObj *self, void *Py_UNUSED(context)) +_asyncio_Future__source_traceback_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Future__source_traceback_get_impl(self); + return_value = _asyncio_Future__source_traceback_get_impl((FutureObj *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -749,12 +774,12 @@ static PyObject * _asyncio_Future__cancel_message_get_impl(FutureObj *self); static PyObject * -_asyncio_Future__cancel_message_get(FutureObj *self, void *Py_UNUSED(context)) +_asyncio_Future__cancel_message_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Future__cancel_message_get_impl(self); + return_value = _asyncio_Future__cancel_message_get_impl((FutureObj *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -774,12 +799,12 @@ static int _asyncio_Future__cancel_message_set_impl(FutureObj *self, PyObject *value); static int -_asyncio_Future__cancel_message_set(FutureObj *self, PyObject *value, void *Py_UNUSED(context)) +_asyncio_Future__cancel_message_set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) { int return_value; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Future__cancel_message_set_impl(self, value); + return_value = _asyncio_Future__cancel_message_set_impl((FutureObj *)self, value); Py_END_CRITICAL_SECTION(); return return_value; @@ -799,12 +824,12 @@ static PyObject * _asyncio_Future__state_get_impl(FutureObj *self); static PyObject * -_asyncio_Future__state_get(FutureObj *self, void *Py_UNUSED(context)) +_asyncio_Future__state_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Future__state_get_impl(self); + return_value = _asyncio_Future__state_get_impl((FutureObj *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -826,12 +851,12 @@ static PyObject * _asyncio_Future__make_cancelled_error_impl(FutureObj *self); static PyObject * -_asyncio_Future__make_cancelled_error(FutureObj *self, PyObject *Py_UNUSED(ignored)) +_asyncio_Future__make_cancelled_error(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Future__make_cancelled_error_impl(self); + return_value = _asyncio_Future__make_cancelled_error_impl((FutureObj *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -939,12 +964,12 @@ static PyObject * _asyncio_Task__log_destroy_pending_get_impl(TaskObj *self); static PyObject * -_asyncio_Task__log_destroy_pending_get(TaskObj *self, void *Py_UNUSED(context)) +_asyncio_Task__log_destroy_pending_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Task__log_destroy_pending_get_impl(self); + return_value = _asyncio_Task__log_destroy_pending_get_impl((TaskObj *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -964,12 +989,12 @@ static int _asyncio_Task__log_destroy_pending_set_impl(TaskObj *self, PyObject *value); static int -_asyncio_Task__log_destroy_pending_set(TaskObj *self, PyObject *value, void *Py_UNUSED(context)) +_asyncio_Task__log_destroy_pending_set(PyObject *self, PyObject *value, void 
*Py_UNUSED(context)) { int return_value; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Task__log_destroy_pending_set_impl(self, value); + return_value = _asyncio_Task__log_destroy_pending_set_impl((TaskObj *)self, value); Py_END_CRITICAL_SECTION(); return return_value; @@ -989,12 +1014,12 @@ static PyObject * _asyncio_Task__must_cancel_get_impl(TaskObj *self); static PyObject * -_asyncio_Task__must_cancel_get(TaskObj *self, void *Py_UNUSED(context)) +_asyncio_Task__must_cancel_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Task__must_cancel_get_impl(self); + return_value = _asyncio_Task__must_cancel_get_impl((TaskObj *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1014,12 +1039,12 @@ static PyObject * _asyncio_Task__coro_get_impl(TaskObj *self); static PyObject * -_asyncio_Task__coro_get(TaskObj *self, void *Py_UNUSED(context)) +_asyncio_Task__coro_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Task__coro_get_impl(self); + return_value = _asyncio_Task__coro_get_impl((TaskObj *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1039,12 +1064,12 @@ static PyObject * _asyncio_Task__fut_waiter_get_impl(TaskObj *self); static PyObject * -_asyncio_Task__fut_waiter_get(TaskObj *self, void *Py_UNUSED(context)) +_asyncio_Task__fut_waiter_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Task__fut_waiter_get_impl(self); + return_value = _asyncio_Task__fut_waiter_get_impl((TaskObj *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1066,12 +1091,12 @@ static PyObject * _asyncio_Task__make_cancelled_error_impl(TaskObj *self); static PyObject * -_asyncio_Task__make_cancelled_error(TaskObj *self, PyObject *Py_UNUSED(ignored)) +_asyncio_Task__make_cancelled_error(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Task__make_cancelled_error_impl(self); + return_value = _asyncio_Task__make_cancelled_error_impl((TaskObj *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1109,7 +1134,7 @@ static PyObject * _asyncio_Task_cancel_impl(TaskObj *self, PyObject *msg); static PyObject * -_asyncio_Task_cancel(TaskObj *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_asyncio_Task_cancel(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -1152,7 +1177,7 @@ _asyncio_Task_cancel(TaskObj *self, PyObject *const *args, Py_ssize_t nargs, PyO msg = args[0]; skip_optional_pos: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Task_cancel_impl(self, msg); + return_value = _asyncio_Task_cancel_impl((TaskObj *)self, msg); Py_END_CRITICAL_SECTION(); exit: @@ -1175,12 +1200,12 @@ static PyObject * _asyncio_Task_cancelling_impl(TaskObj *self); static PyObject * -_asyncio_Task_cancelling(TaskObj *self, PyObject *Py_UNUSED(ignored)) +_asyncio_Task_cancelling(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Task_cancelling_impl(self); + return_value = _asyncio_Task_cancelling_impl((TaskObj *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1204,12 +1229,12 @@ static PyObject * 
_asyncio_Task_uncancel_impl(TaskObj *self); static PyObject * -_asyncio_Task_uncancel(TaskObj *self, PyObject *Py_UNUSED(ignored)) +_asyncio_Task_uncancel(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Task_uncancel_impl(self); + return_value = _asyncio_Task_uncancel_impl((TaskObj *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1247,7 +1272,7 @@ _asyncio_Task_get_stack_impl(TaskObj *self, PyTypeObject *cls, PyObject *limit); static PyObject * -_asyncio_Task_get_stack(TaskObj *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_asyncio_Task_get_stack(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -1289,7 +1314,7 @@ _asyncio_Task_get_stack(TaskObj *self, PyTypeObject *cls, PyObject *const *args, } limit = args[0]; skip_optional_kwonly: - return_value = _asyncio_Task_get_stack_impl(self, cls, limit); + return_value = _asyncio_Task_get_stack_impl((TaskObj *)self, cls, limit); exit: return return_value; @@ -1315,7 +1340,7 @@ _asyncio_Task_print_stack_impl(TaskObj *self, PyTypeObject *cls, PyObject *limit, PyObject *file); static PyObject * -_asyncio_Task_print_stack(TaskObj *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_asyncio_Task_print_stack(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -1364,7 +1389,7 @@ _asyncio_Task_print_stack(TaskObj *self, PyTypeObject *cls, PyObject *const *arg } file = args[1]; skip_optional_kwonly: - return_value = _asyncio_Task_print_stack_impl(self, cls, limit, file); + return_value = _asyncio_Task_print_stack_impl((TaskObj *)self, cls, limit, file); exit: return return_value; @@ -1398,12 +1423,12 @@ static PyObject * _asyncio_Task_get_coro_impl(TaskObj *self); static PyObject * -_asyncio_Task_get_coro(TaskObj *self, PyObject *Py_UNUSED(ignored)) +_asyncio_Task_get_coro(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Task_get_coro_impl(self); + return_value = _asyncio_Task_get_coro_impl((TaskObj *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1421,9 +1446,9 @@ static PyObject * _asyncio_Task_get_context_impl(TaskObj *self); static PyObject * -_asyncio_Task_get_context(TaskObj *self, PyObject *Py_UNUSED(ignored)) +_asyncio_Task_get_context(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _asyncio_Task_get_context_impl(self); + return _asyncio_Task_get_context_impl((TaskObj *)self); } PyDoc_STRVAR(_asyncio_Task_get_name__doc__, @@ -1438,12 +1463,12 @@ static PyObject * _asyncio_Task_get_name_impl(TaskObj *self); static PyObject * -_asyncio_Task_get_name(TaskObj *self, PyObject *Py_UNUSED(ignored)) +_asyncio_Task_get_name(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Task_get_name_impl(self); + return_value = _asyncio_Task_get_name_impl((TaskObj *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1466,7 +1491,7 @@ _asyncio_Task_set_name(TaskObj *self, PyObject *value) PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Task_set_name_impl(self, value); + 
return_value = _asyncio_Task_set_name_impl((TaskObj *)self, value); Py_END_CRITICAL_SECTION(); return return_value; @@ -2088,4 +2113,65 @@ _asyncio_all_tasks(PyObject *module, PyObject *const *args, Py_ssize_t nargs, Py exit: return return_value; } -/*[clinic end generated code: output=408e156476ced07f input=a9049054013a1b77]*/ + +PyDoc_STRVAR(_asyncio_future_add_to_awaited_by__doc__, +"future_add_to_awaited_by($module, fut, waiter, /)\n" +"--\n" +"\n" +"Record that `fut` is awaited on by `waiter`."); + +#define _ASYNCIO_FUTURE_ADD_TO_AWAITED_BY_METHODDEF \ + {"future_add_to_awaited_by", _PyCFunction_CAST(_asyncio_future_add_to_awaited_by), METH_FASTCALL, _asyncio_future_add_to_awaited_by__doc__}, + +static PyObject * +_asyncio_future_add_to_awaited_by_impl(PyObject *module, PyObject *fut, + PyObject *waiter); + +static PyObject * +_asyncio_future_add_to_awaited_by(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + PyObject *fut; + PyObject *waiter; + + if (!_PyArg_CheckPositional("future_add_to_awaited_by", nargs, 2, 2)) { + goto exit; + } + fut = args[0]; + waiter = args[1]; + return_value = _asyncio_future_add_to_awaited_by_impl(module, fut, waiter); + +exit: + return return_value; +} + +PyDoc_STRVAR(_asyncio_future_discard_from_awaited_by__doc__, +"future_discard_from_awaited_by($module, fut, waiter, /)\n" +"--\n" +"\n"); + +#define _ASYNCIO_FUTURE_DISCARD_FROM_AWAITED_BY_METHODDEF \ + {"future_discard_from_awaited_by", _PyCFunction_CAST(_asyncio_future_discard_from_awaited_by), METH_FASTCALL, _asyncio_future_discard_from_awaited_by__doc__}, + +static PyObject * +_asyncio_future_discard_from_awaited_by_impl(PyObject *module, PyObject *fut, + PyObject *waiter); + +static PyObject * +_asyncio_future_discard_from_awaited_by(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + PyObject *fut; + PyObject *waiter; + + if (!_PyArg_CheckPositional("future_discard_from_awaited_by", nargs, 2, 2)) { + goto exit; + } + fut = args[0]; + waiter = args[1]; + return_value = _asyncio_future_discard_from_awaited_by_impl(module, fut, waiter); + +exit: + return return_value; +} +/*[clinic end generated code: output=fe4ffe08404ad566 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_bz2module.c.h b/Modules/clinic/_bz2module.c.h index 93988bf48a1b00..a599bd1a8be96a 100644 --- a/Modules/clinic/_bz2module.c.h +++ b/Modules/clinic/_bz2module.c.h @@ -27,7 +27,7 @@ static PyObject * _bz2_BZ2Compressor_compress_impl(BZ2Compressor *self, Py_buffer *data); static PyObject * -_bz2_BZ2Compressor_compress(BZ2Compressor *self, PyObject *arg) +_bz2_BZ2Compressor_compress(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; Py_buffer data = {NULL, NULL}; @@ -35,7 +35,7 @@ _bz2_BZ2Compressor_compress(BZ2Compressor *self, PyObject *arg) if (PyObject_GetBuffer(arg, &data, PyBUF_SIMPLE) != 0) { goto exit; } - return_value = _bz2_BZ2Compressor_compress_impl(self, &data); + return_value = _bz2_BZ2Compressor_compress_impl((BZ2Compressor *)self, &data); exit: /* Cleanup for data */ @@ -63,9 +63,9 @@ static PyObject * _bz2_BZ2Compressor_flush_impl(BZ2Compressor *self); static PyObject * -_bz2_BZ2Compressor_flush(BZ2Compressor *self, PyObject *Py_UNUSED(ignored)) +_bz2_BZ2Compressor_flush(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _bz2_BZ2Compressor_flush_impl(self); + return _bz2_BZ2Compressor_flush_impl((BZ2Compressor *)self); } PyDoc_STRVAR(_bz2_BZ2Compressor__doc__, @@ -137,7 +137,7 @@ 
_bz2_BZ2Decompressor_decompress_impl(BZ2Decompressor *self, Py_buffer *data, Py_ssize_t max_length); static PyObject * -_bz2_BZ2Decompressor_decompress(BZ2Decompressor *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_bz2_BZ2Decompressor_decompress(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -194,7 +194,7 @@ _bz2_BZ2Decompressor_decompress(BZ2Decompressor *self, PyObject *const *args, Py max_length = ival; } skip_optional_pos: - return_value = _bz2_BZ2Decompressor_decompress_impl(self, &data, max_length); + return_value = _bz2_BZ2Decompressor_decompress_impl((BZ2Decompressor *)self, &data, max_length); exit: /* Cleanup for data */ @@ -235,4 +235,4 @@ _bz2_BZ2Decompressor(PyTypeObject *type, PyObject *args, PyObject *kwargs) exit: return return_value; } -/*[clinic end generated code: output=701a383434374c36 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=0fc5a6292c5fd2c5 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_collectionsmodule.c.h b/Modules/clinic/_collectionsmodule.c.h index b4e3325e89502b..ddf18c2c77a8cd 100644 --- a/Modules/clinic/_collectionsmodule.c.h +++ b/Modules/clinic/_collectionsmodule.c.h @@ -23,12 +23,12 @@ static PyObject * deque_pop_impl(dequeobject *deque); static PyObject * -deque_pop(dequeobject *deque, PyObject *Py_UNUSED(ignored)) +deque_pop(PyObject *deque, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(deque); - return_value = deque_pop_impl(deque); + return_value = deque_pop_impl((dequeobject *)deque); Py_END_CRITICAL_SECTION(); return return_value; @@ -47,12 +47,12 @@ static PyObject * deque_popleft_impl(dequeobject *deque); static PyObject * -deque_popleft(dequeobject *deque, PyObject *Py_UNUSED(ignored)) +deque_popleft(PyObject *deque, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(deque); - return_value = deque_popleft_impl(deque); + return_value = deque_popleft_impl((dequeobject *)deque); Py_END_CRITICAL_SECTION(); return return_value; @@ -76,7 +76,7 @@ deque_append(dequeobject *deque, PyObject *item) PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(deque); - return_value = deque_append_impl(deque, item); + return_value = deque_append_impl((dequeobject *)deque, item); Py_END_CRITICAL_SECTION(); return return_value; @@ -100,7 +100,7 @@ deque_appendleft(dequeobject *deque, PyObject *item) PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(deque); - return_value = deque_appendleft_impl(deque, item); + return_value = deque_appendleft_impl((dequeobject *)deque, item); Py_END_CRITICAL_SECTION(); return return_value; @@ -124,7 +124,7 @@ deque_extend(dequeobject *deque, PyObject *iterable) PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(deque); - return_value = deque_extend_impl(deque, iterable); + return_value = deque_extend_impl((dequeobject *)deque, iterable); Py_END_CRITICAL_SECTION(); return return_value; @@ -148,7 +148,7 @@ deque_extendleft(dequeobject *deque, PyObject *iterable) PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(deque); - return_value = deque_extendleft_impl(deque, iterable); + return_value = deque_extendleft_impl((dequeobject *)deque, iterable); Py_END_CRITICAL_SECTION(); return return_value; @@ -167,12 +167,12 @@ static PyObject * deque_copy_impl(dequeobject *deque); static PyObject * -deque_copy(dequeobject *deque, PyObject *Py_UNUSED(ignored)) 
+deque_copy(PyObject *deque, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(deque); - return_value = deque_copy_impl(deque); + return_value = deque_copy_impl((dequeobject *)deque); Py_END_CRITICAL_SECTION(); return return_value; @@ -191,12 +191,12 @@ static PyObject * deque___copy___impl(dequeobject *deque); static PyObject * -deque___copy__(dequeobject *deque, PyObject *Py_UNUSED(ignored)) +deque___copy__(PyObject *deque, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(deque); - return_value = deque___copy___impl(deque); + return_value = deque___copy___impl((dequeobject *)deque); Py_END_CRITICAL_SECTION(); return return_value; @@ -215,12 +215,12 @@ static PyObject * deque_clearmethod_impl(dequeobject *deque); static PyObject * -deque_clearmethod(dequeobject *deque, PyObject *Py_UNUSED(ignored)) +deque_clearmethod(PyObject *deque, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(deque); - return_value = deque_clearmethod_impl(deque); + return_value = deque_clearmethod_impl((dequeobject *)deque); Py_END_CRITICAL_SECTION(); return return_value; @@ -239,7 +239,7 @@ static PyObject * deque_rotate_impl(dequeobject *deque, Py_ssize_t n); static PyObject * -deque_rotate(dequeobject *deque, PyObject *const *args, Py_ssize_t nargs) +deque_rotate(PyObject *deque, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; Py_ssize_t n = 1; @@ -264,7 +264,7 @@ deque_rotate(dequeobject *deque, PyObject *const *args, Py_ssize_t nargs) } skip_optional: Py_BEGIN_CRITICAL_SECTION(deque); - return_value = deque_rotate_impl(deque, n); + return_value = deque_rotate_impl((dequeobject *)deque, n); Py_END_CRITICAL_SECTION(); exit: @@ -284,12 +284,12 @@ static PyObject * deque_reverse_impl(dequeobject *deque); static PyObject * -deque_reverse(dequeobject *deque, PyObject *Py_UNUSED(ignored)) +deque_reverse(PyObject *deque, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(deque); - return_value = deque_reverse_impl(deque); + return_value = deque_reverse_impl((dequeobject *)deque); Py_END_CRITICAL_SECTION(); return return_value; @@ -313,7 +313,7 @@ deque_count(dequeobject *deque, PyObject *v) PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(deque); - return_value = deque_count_impl(deque, v); + return_value = deque_count_impl((dequeobject *)deque, v); Py_END_CRITICAL_SECTION(); return return_value; @@ -335,7 +335,7 @@ deque_index_impl(dequeobject *deque, PyObject *v, Py_ssize_t start, Py_ssize_t stop); static PyObject * -deque_index(dequeobject *deque, PyObject *const *args, Py_ssize_t nargs) +deque_index(PyObject *deque, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *v; @@ -360,7 +360,7 @@ deque_index(dequeobject *deque, PyObject *const *args, Py_ssize_t nargs) } skip_optional: Py_BEGIN_CRITICAL_SECTION(deque); - return_value = deque_index_impl(deque, v, start, stop); + return_value = deque_index_impl((dequeobject *)deque, v, start, stop); Py_END_CRITICAL_SECTION(); exit: @@ -380,7 +380,7 @@ static PyObject * deque_insert_impl(dequeobject *deque, Py_ssize_t index, PyObject *value); static PyObject * -deque_insert(dequeobject *deque, PyObject *const *args, Py_ssize_t nargs) +deque_insert(PyObject *deque, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; Py_ssize_t index; @@ -403,7 +403,7 @@ deque_insert(dequeobject *deque, PyObject *const *args, Py_ssize_t nargs) } value = 
args[1]; Py_BEGIN_CRITICAL_SECTION(deque); - return_value = deque_insert_impl(deque, index, value); + return_value = deque_insert_impl((dequeobject *)deque, index, value); Py_END_CRITICAL_SECTION(); exit: @@ -428,7 +428,7 @@ deque_remove(dequeobject *deque, PyObject *value) PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(deque); - return_value = deque_remove_impl(deque, value); + return_value = deque_remove_impl((dequeobject *)deque, value); Py_END_CRITICAL_SECTION(); return return_value; @@ -447,9 +447,9 @@ static PyObject * deque___reduce___impl(dequeobject *deque); static PyObject * -deque___reduce__(dequeobject *deque, PyObject *Py_UNUSED(ignored)) +deque___reduce__(PyObject *deque, PyObject *Py_UNUSED(ignored)) { - return deque___reduce___impl(deque); + return deque___reduce___impl((dequeobject *)deque); } PyDoc_STRVAR(deque_init__doc__, @@ -534,12 +534,12 @@ static PyObject * deque___sizeof___impl(dequeobject *deque); static PyObject * -deque___sizeof__(dequeobject *deque, PyObject *Py_UNUSED(ignored)) +deque___sizeof__(PyObject *deque, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(deque); - return_value = deque___sizeof___impl(deque); + return_value = deque___sizeof___impl((dequeobject *)deque); Py_END_CRITICAL_SECTION(); return return_value; @@ -558,9 +558,9 @@ static PyObject * deque___reversed___impl(dequeobject *deque); static PyObject * -deque___reversed__(dequeobject *deque, PyObject *Py_UNUSED(ignored)) +deque___reversed__(PyObject *deque, PyObject *Py_UNUSED(ignored)) { - return deque___reversed___impl(deque); + return deque___reversed___impl((dequeobject *)deque); } PyDoc_STRVAR(_collections__count_elements__doc__, @@ -630,4 +630,4 @@ tuplegetter_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) exit: return return_value; } -/*[clinic end generated code: output=65f896fb13902f6d input=a9049054013a1b77]*/ +/*[clinic end generated code: output=2d89c39288fc7389 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_curses_panel.c.h b/Modules/clinic/_curses_panel.c.h index b6bff5274a3a91..6f4966825ec4bf 100644 --- a/Modules/clinic/_curses_panel.c.h +++ b/Modules/clinic/_curses_panel.c.h @@ -20,13 +20,13 @@ static PyObject * _curses_panel_panel_bottom_impl(PyCursesPanelObject *self, PyTypeObject *cls); static PyObject * -_curses_panel_panel_bottom(PyCursesPanelObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_curses_panel_panel_bottom(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "bottom() takes no arguments"); return NULL; } - return _curses_panel_panel_bottom_impl(self, cls); + return _curses_panel_panel_bottom_impl((PyCursesPanelObject *)self, cls); } PyDoc_STRVAR(_curses_panel_panel_hide__doc__, @@ -44,13 +44,13 @@ static PyObject * _curses_panel_panel_hide_impl(PyCursesPanelObject *self, PyTypeObject *cls); static PyObject * -_curses_panel_panel_hide(PyCursesPanelObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_curses_panel_panel_hide(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "hide() takes no arguments"); return NULL; } - return _curses_panel_panel_hide_impl(self, cls); + return _curses_panel_panel_hide_impl((PyCursesPanelObject *)self, cls); } 
PyDoc_STRVAR(_curses_panel_panel_show__doc__, @@ -66,13 +66,13 @@ static PyObject * _curses_panel_panel_show_impl(PyCursesPanelObject *self, PyTypeObject *cls); static PyObject * -_curses_panel_panel_show(PyCursesPanelObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_curses_panel_panel_show(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "show() takes no arguments"); return NULL; } - return _curses_panel_panel_show_impl(self, cls); + return _curses_panel_panel_show_impl((PyCursesPanelObject *)self, cls); } PyDoc_STRVAR(_curses_panel_panel_top__doc__, @@ -88,13 +88,13 @@ static PyObject * _curses_panel_panel_top_impl(PyCursesPanelObject *self, PyTypeObject *cls); static PyObject * -_curses_panel_panel_top(PyCursesPanelObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_curses_panel_panel_top(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "top() takes no arguments"); return NULL; } - return _curses_panel_panel_top_impl(self, cls); + return _curses_panel_panel_top_impl((PyCursesPanelObject *)self, cls); } PyDoc_STRVAR(_curses_panel_panel_above__doc__, @@ -110,9 +110,9 @@ static PyObject * _curses_panel_panel_above_impl(PyCursesPanelObject *self); static PyObject * -_curses_panel_panel_above(PyCursesPanelObject *self, PyObject *Py_UNUSED(ignored)) +_curses_panel_panel_above(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _curses_panel_panel_above_impl(self); + return _curses_panel_panel_above_impl((PyCursesPanelObject *)self); } PyDoc_STRVAR(_curses_panel_panel_below__doc__, @@ -128,9 +128,9 @@ static PyObject * _curses_panel_panel_below_impl(PyCursesPanelObject *self); static PyObject * -_curses_panel_panel_below(PyCursesPanelObject *self, PyObject *Py_UNUSED(ignored)) +_curses_panel_panel_below(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _curses_panel_panel_below_impl(self); + return _curses_panel_panel_below_impl((PyCursesPanelObject *)self); } PyDoc_STRVAR(_curses_panel_panel_hidden__doc__, @@ -146,9 +146,9 @@ static PyObject * _curses_panel_panel_hidden_impl(PyCursesPanelObject *self); static PyObject * -_curses_panel_panel_hidden(PyCursesPanelObject *self, PyObject *Py_UNUSED(ignored)) +_curses_panel_panel_hidden(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _curses_panel_panel_hidden_impl(self); + return _curses_panel_panel_hidden_impl((PyCursesPanelObject *)self); } PyDoc_STRVAR(_curses_panel_panel_move__doc__, @@ -165,7 +165,7 @@ _curses_panel_panel_move_impl(PyCursesPanelObject *self, PyTypeObject *cls, int y, int x); static PyObject * -_curses_panel_panel_move(PyCursesPanelObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_curses_panel_panel_move(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -198,7 +198,7 @@ _curses_panel_panel_move(PyCursesPanelObject *self, PyTypeObject *cls, PyObject if (x == -1 && PyErr_Occurred()) { goto exit; } - return_value = _curses_panel_panel_move_impl(self, cls, y, x); + return_value = _curses_panel_panel_move_impl((PyCursesPanelObject *)self, cls, y, x); exit: return 
return_value; @@ -217,9 +217,9 @@ static PyObject * _curses_panel_panel_window_impl(PyCursesPanelObject *self); static PyObject * -_curses_panel_panel_window(PyCursesPanelObject *self, PyObject *Py_UNUSED(ignored)) +_curses_panel_panel_window(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _curses_panel_panel_window_impl(self); + return _curses_panel_panel_window_impl((PyCursesPanelObject *)self); } PyDoc_STRVAR(_curses_panel_panel_replace__doc__, @@ -237,7 +237,7 @@ _curses_panel_panel_replace_impl(PyCursesPanelObject *self, PyCursesWindowObject *win); static PyObject * -_curses_panel_panel_replace(PyCursesPanelObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_curses_panel_panel_replace(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -266,7 +266,7 @@ _curses_panel_panel_replace(PyCursesPanelObject *self, PyTypeObject *cls, PyObje goto exit; } win = (PyCursesWindowObject *)args[0]; - return_value = _curses_panel_panel_replace_impl(self, cls, win); + return_value = _curses_panel_panel_replace_impl((PyCursesPanelObject *)self, cls, win); exit: return return_value; @@ -286,7 +286,7 @@ _curses_panel_panel_set_userptr_impl(PyCursesPanelObject *self, PyTypeObject *cls, PyObject *obj); static PyObject * -_curses_panel_panel_set_userptr(PyCursesPanelObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_curses_panel_panel_set_userptr(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -311,7 +311,7 @@ _curses_panel_panel_set_userptr(PyCursesPanelObject *self, PyTypeObject *cls, Py goto exit; } obj = args[0]; - return_value = _curses_panel_panel_set_userptr_impl(self, cls, obj); + return_value = _curses_panel_panel_set_userptr_impl((PyCursesPanelObject *)self, cls, obj); exit: return return_value; @@ -331,13 +331,13 @@ _curses_panel_panel_userptr_impl(PyCursesPanelObject *self, PyTypeObject *cls); static PyObject * -_curses_panel_panel_userptr(PyCursesPanelObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_curses_panel_panel_userptr(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "userptr() takes no arguments"); return NULL; } - return _curses_panel_panel_userptr_impl(self, cls); + return _curses_panel_panel_userptr_impl((PyCursesPanelObject *)self, cls); } PyDoc_STRVAR(_curses_panel_bottom_panel__doc__, @@ -424,4 +424,4 @@ _curses_panel_update_panels(PyObject *module, PyObject *Py_UNUSED(ignored)) { return _curses_panel_update_panels_impl(module); } -/*[clinic end generated code: output=298e49d54c0b14a0 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=36853ecb4a979814 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_cursesmodule.c.h b/Modules/clinic/_cursesmodule.c.h index 524a114aba98bc..8291d5d635c79d 100644 --- a/Modules/clinic/_cursesmodule.c.h +++ b/Modules/clinic/_cursesmodule.c.h @@ -35,7 +35,7 @@ _curses_window_addch_impl(PyCursesWindowObject *self, int group_left_1, long attr); static PyObject * -_curses_window_addch(PyCursesWindowObject *self, PyObject *args) +_curses_window_addch(PyObject *self, PyObject *args) { 
PyObject *return_value = NULL; int group_left_1 = 0; @@ -74,7 +74,7 @@ _curses_window_addch(PyCursesWindowObject *self, PyObject *args) PyErr_SetString(PyExc_TypeError, "_curses.window.addch requires 1 to 4 arguments"); goto exit; } - return_value = _curses_window_addch_impl(self, group_left_1, y, x, ch, group_right_1, attr); + return_value = _curses_window_addch_impl((PyCursesWindowObject *)self, group_left_1, y, x, ch, group_right_1, attr); exit: return return_value; @@ -107,7 +107,7 @@ _curses_window_addstr_impl(PyCursesWindowObject *self, int group_left_1, long attr); static PyObject * -_curses_window_addstr(PyCursesWindowObject *self, PyObject *args) +_curses_window_addstr(PyObject *self, PyObject *args) { PyObject *return_value = NULL; int group_left_1 = 0; @@ -146,7 +146,7 @@ _curses_window_addstr(PyCursesWindowObject *self, PyObject *args) PyErr_SetString(PyExc_TypeError, "_curses.window.addstr requires 1 to 4 arguments"); goto exit; } - return_value = _curses_window_addstr_impl(self, group_left_1, y, x, str, group_right_1, attr); + return_value = _curses_window_addstr_impl((PyCursesWindowObject *)self, group_left_1, y, x, str, group_right_1, attr); exit: return return_value; @@ -181,7 +181,7 @@ _curses_window_addnstr_impl(PyCursesWindowObject *self, int group_left_1, int group_right_1, long attr); static PyObject * -_curses_window_addnstr(PyCursesWindowObject *self, PyObject *args) +_curses_window_addnstr(PyObject *self, PyObject *args) { PyObject *return_value = NULL; int group_left_1 = 0; @@ -221,7 +221,7 @@ _curses_window_addnstr(PyCursesWindowObject *self, PyObject *args) PyErr_SetString(PyExc_TypeError, "_curses.window.addnstr requires 2 to 5 arguments"); goto exit; } - return_value = _curses_window_addnstr_impl(self, group_left_1, y, x, str, n, group_right_1, attr); + return_value = _curses_window_addnstr_impl((PyCursesWindowObject *)self, group_left_1, y, x, str, n, group_right_1, attr); exit: return return_value; @@ -245,7 +245,7 @@ static PyObject * _curses_window_bkgd_impl(PyCursesWindowObject *self, PyObject *ch, long attr); static PyObject * -_curses_window_bkgd(PyCursesWindowObject *self, PyObject *const *args, Py_ssize_t nargs) +_curses_window_bkgd(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *ch; @@ -263,7 +263,7 @@ _curses_window_bkgd(PyCursesWindowObject *self, PyObject *const *args, Py_ssize_ goto exit; } skip_optional: - return_value = _curses_window_bkgd_impl(self, ch, attr); + return_value = _curses_window_bkgd_impl((PyCursesWindowObject *)self, ch, attr); exit: return return_value; @@ -282,7 +282,7 @@ static PyObject * _curses_window_attroff_impl(PyCursesWindowObject *self, long attr); static PyObject * -_curses_window_attroff(PyCursesWindowObject *self, PyObject *arg) +_curses_window_attroff(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; long attr; @@ -291,7 +291,7 @@ _curses_window_attroff(PyCursesWindowObject *self, PyObject *arg) if (attr == -1 && PyErr_Occurred()) { goto exit; } - return_value = _curses_window_attroff_impl(self, attr); + return_value = _curses_window_attroff_impl((PyCursesWindowObject *)self, attr); exit: return return_value; @@ -310,7 +310,7 @@ static PyObject * _curses_window_attron_impl(PyCursesWindowObject *self, long attr); static PyObject * -_curses_window_attron(PyCursesWindowObject *self, PyObject *arg) +_curses_window_attron(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; long attr; @@ -319,7 +319,7 @@ 
_curses_window_attron(PyCursesWindowObject *self, PyObject *arg) if (attr == -1 && PyErr_Occurred()) { goto exit; } - return_value = _curses_window_attron_impl(self, attr); + return_value = _curses_window_attron_impl((PyCursesWindowObject *)self, attr); exit: return return_value; @@ -338,7 +338,7 @@ static PyObject * _curses_window_attrset_impl(PyCursesWindowObject *self, long attr); static PyObject * -_curses_window_attrset(PyCursesWindowObject *self, PyObject *arg) +_curses_window_attrset(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; long attr; @@ -347,7 +347,7 @@ _curses_window_attrset(PyCursesWindowObject *self, PyObject *arg) if (attr == -1 && PyErr_Occurred()) { goto exit; } - return_value = _curses_window_attrset_impl(self, attr); + return_value = _curses_window_attrset_impl((PyCursesWindowObject *)self, attr); exit: return return_value; @@ -372,7 +372,7 @@ _curses_window_bkgdset_impl(PyCursesWindowObject *self, PyObject *ch, long attr); static PyObject * -_curses_window_bkgdset(PyCursesWindowObject *self, PyObject *const *args, Py_ssize_t nargs) +_curses_window_bkgdset(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *ch; @@ -390,7 +390,7 @@ _curses_window_bkgdset(PyCursesWindowObject *self, PyObject *const *args, Py_ssi goto exit; } skip_optional: - return_value = _curses_window_bkgdset_impl(self, ch, attr); + return_value = _curses_window_bkgdset_impl((PyCursesWindowObject *)self, ch, attr); exit: return return_value; @@ -437,7 +437,7 @@ _curses_window_border_impl(PyCursesWindowObject *self, PyObject *ls, PyObject *br); static PyObject * -_curses_window_border(PyCursesWindowObject *self, PyObject *const *args, Py_ssize_t nargs) +_curses_window_border(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *ls = NULL; @@ -485,7 +485,7 @@ _curses_window_border(PyCursesWindowObject *self, PyObject *const *args, Py_ssiz } br = args[7]; skip_optional: - return_value = _curses_window_border_impl(self, ls, rs, ts, bs, tl, tr, bl, br); + return_value = _curses_window_border_impl((PyCursesWindowObject *)self, ls, rs, ts, bs, tl, tr, bl, br); exit: return return_value; @@ -511,7 +511,7 @@ _curses_window_box_impl(PyCursesWindowObject *self, int group_right_1, PyObject *verch, PyObject *horch); static PyObject * -_curses_window_box(PyCursesWindowObject *self, PyObject *args) +_curses_window_box(PyObject *self, PyObject *args) { PyObject *return_value = NULL; int group_right_1 = 0; @@ -531,7 +531,7 @@ _curses_window_box(PyCursesWindowObject *self, PyObject *args) PyErr_SetString(PyExc_TypeError, "_curses.window.box requires 0 to 2 arguments"); goto exit; } - return_value = _curses_window_box_impl(self, group_right_1, verch, horch); + return_value = _curses_window_box_impl((PyCursesWindowObject *)self, group_right_1, verch, horch); exit: return return_value; @@ -554,7 +554,7 @@ _curses_window_delch_impl(PyCursesWindowObject *self, int group_right_1, int y, int x); static PyObject * -_curses_window_delch(PyCursesWindowObject *self, PyObject *args) +_curses_window_delch(PyObject *self, PyObject *args) { PyObject *return_value = NULL; int group_right_1 = 0; @@ -574,7 +574,7 @@ _curses_window_delch(PyCursesWindowObject *self, PyObject *args) PyErr_SetString(PyExc_TypeError, "_curses.window.delch requires 0 to 2 arguments"); goto exit; } - return_value = _curses_window_delch_impl(self, group_right_1, y, x); + return_value = _curses_window_delch_impl((PyCursesWindowObject *)self, 
group_right_1, y, x); exit: return return_value; @@ -605,7 +605,7 @@ _curses_window_derwin_impl(PyCursesWindowObject *self, int group_left_1, int nlines, int ncols, int begin_y, int begin_x); static PyObject * -_curses_window_derwin(PyCursesWindowObject *self, PyObject *args) +_curses_window_derwin(PyObject *self, PyObject *args) { PyObject *return_value = NULL; int group_left_1 = 0; @@ -630,7 +630,7 @@ _curses_window_derwin(PyCursesWindowObject *self, PyObject *args) PyErr_SetString(PyExc_TypeError, "_curses.window.derwin requires 2 to 4 arguments"); goto exit; } - return_value = _curses_window_derwin_impl(self, group_left_1, nlines, ncols, begin_y, begin_x); + return_value = _curses_window_derwin_impl((PyCursesWindowObject *)self, group_left_1, nlines, ncols, begin_y, begin_x); exit: return return_value; @@ -655,7 +655,7 @@ _curses_window_echochar_impl(PyCursesWindowObject *self, PyObject *ch, long attr); static PyObject * -_curses_window_echochar(PyCursesWindowObject *self, PyObject *const *args, Py_ssize_t nargs) +_curses_window_echochar(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *ch; @@ -673,7 +673,7 @@ _curses_window_echochar(PyCursesWindowObject *self, PyObject *const *args, Py_ss goto exit; } skip_optional: - return_value = _curses_window_echochar_impl(self, ch, attr); + return_value = _curses_window_echochar_impl((PyCursesWindowObject *)self, ch, attr); exit: return return_value; @@ -699,7 +699,7 @@ static PyObject * _curses_window_enclose_impl(PyCursesWindowObject *self, int y, int x); static PyObject * -_curses_window_enclose(PyCursesWindowObject *self, PyObject *const *args, Py_ssize_t nargs) +_curses_window_enclose(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; int y; @@ -716,7 +716,7 @@ _curses_window_enclose(PyCursesWindowObject *self, PyObject *const *args, Py_ssi if (x == -1 && PyErr_Occurred()) { goto exit; } - return_value = _curses_window_enclose_impl(self, y, x); + return_value = _curses_window_enclose_impl((PyCursesWindowObject *)self, y, x); exit: return return_value; @@ -737,12 +737,12 @@ static long _curses_window_getbkgd_impl(PyCursesWindowObject *self); static PyObject * -_curses_window_getbkgd(PyCursesWindowObject *self, PyObject *Py_UNUSED(ignored)) +_curses_window_getbkgd(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; long _return_value; - _return_value = _curses_window_getbkgd_impl(self); + _return_value = _curses_window_getbkgd_impl((PyCursesWindowObject *)self); if ((_return_value == -1) && PyErr_Occurred()) { goto exit; } @@ -773,7 +773,7 @@ _curses_window_getch_impl(PyCursesWindowObject *self, int group_right_1, int y, int x); static PyObject * -_curses_window_getch(PyCursesWindowObject *self, PyObject *args) +_curses_window_getch(PyObject *self, PyObject *args) { PyObject *return_value = NULL; int group_right_1 = 0; @@ -794,7 +794,7 @@ _curses_window_getch(PyCursesWindowObject *self, PyObject *args) PyErr_SetString(PyExc_TypeError, "_curses.window.getch requires 0 to 2 arguments"); goto exit; } - _return_value = _curses_window_getch_impl(self, group_right_1, y, x); + _return_value = _curses_window_getch_impl((PyCursesWindowObject *)self, group_right_1, y, x); if ((_return_value == -1) && PyErr_Occurred()) { goto exit; } @@ -825,7 +825,7 @@ _curses_window_getkey_impl(PyCursesWindowObject *self, int group_right_1, int y, int x); static PyObject * -_curses_window_getkey(PyCursesWindowObject *self, PyObject *args) 
+_curses_window_getkey(PyObject *self, PyObject *args) { PyObject *return_value = NULL; int group_right_1 = 0; @@ -845,7 +845,7 @@ _curses_window_getkey(PyCursesWindowObject *self, PyObject *args) PyErr_SetString(PyExc_TypeError, "_curses.window.getkey requires 0 to 2 arguments"); goto exit; } - return_value = _curses_window_getkey_impl(self, group_right_1, y, x); + return_value = _curses_window_getkey_impl((PyCursesWindowObject *)self, group_right_1, y, x); exit: return return_value; @@ -873,7 +873,7 @@ _curses_window_get_wch_impl(PyCursesWindowObject *self, int group_right_1, int y, int x); static PyObject * -_curses_window_get_wch(PyCursesWindowObject *self, PyObject *args) +_curses_window_get_wch(PyObject *self, PyObject *args) { PyObject *return_value = NULL; int group_right_1 = 0; @@ -893,7 +893,7 @@ _curses_window_get_wch(PyCursesWindowObject *self, PyObject *args) PyErr_SetString(PyExc_TypeError, "_curses.window.get_wch requires 0 to 2 arguments"); goto exit; } - return_value = _curses_window_get_wch_impl(self, group_right_1, y, x); + return_value = _curses_window_get_wch_impl((PyCursesWindowObject *)self, group_right_1, y, x); exit: return return_value; @@ -925,7 +925,7 @@ _curses_window_hline_impl(PyCursesWindowObject *self, int group_left_1, int group_right_1, long attr); static PyObject * -_curses_window_hline(PyCursesWindowObject *self, PyObject *args) +_curses_window_hline(PyObject *self, PyObject *args) { PyObject *return_value = NULL; int group_left_1 = 0; @@ -965,7 +965,7 @@ _curses_window_hline(PyCursesWindowObject *self, PyObject *args) PyErr_SetString(PyExc_TypeError, "_curses.window.hline requires 2 to 5 arguments"); goto exit; } - return_value = _curses_window_hline_impl(self, group_left_1, y, x, ch, n, group_right_1, attr); + return_value = _curses_window_hline_impl((PyCursesWindowObject *)self, group_left_1, y, x, ch, n, group_right_1, attr); exit: return return_value; @@ -996,7 +996,7 @@ _curses_window_insch_impl(PyCursesWindowObject *self, int group_left_1, long attr); static PyObject * -_curses_window_insch(PyCursesWindowObject *self, PyObject *args) +_curses_window_insch(PyObject *self, PyObject *args) { PyObject *return_value = NULL; int group_left_1 = 0; @@ -1035,7 +1035,7 @@ _curses_window_insch(PyCursesWindowObject *self, PyObject *args) PyErr_SetString(PyExc_TypeError, "_curses.window.insch requires 1 to 4 arguments"); goto exit; } - return_value = _curses_window_insch_impl(self, group_left_1, y, x, ch, group_right_1, attr); + return_value = _curses_window_insch_impl((PyCursesWindowObject *)self, group_left_1, y, x, ch, group_right_1, attr); exit: return return_value; @@ -1060,7 +1060,7 @@ _curses_window_inch_impl(PyCursesWindowObject *self, int group_right_1, int y, int x); static PyObject * -_curses_window_inch(PyCursesWindowObject *self, PyObject *args) +_curses_window_inch(PyObject *self, PyObject *args) { PyObject *return_value = NULL; int group_right_1 = 0; @@ -1081,7 +1081,7 @@ _curses_window_inch(PyCursesWindowObject *self, PyObject *args) PyErr_SetString(PyExc_TypeError, "_curses.window.inch requires 0 to 2 arguments"); goto exit; } - _return_value = _curses_window_inch_impl(self, group_right_1, y, x); + _return_value = _curses_window_inch_impl((PyCursesWindowObject *)self, group_right_1, y, x); if ((_return_value == (unsigned long)-1) && PyErr_Occurred()) { goto exit; } @@ -1119,7 +1119,7 @@ _curses_window_insstr_impl(PyCursesWindowObject *self, int group_left_1, long attr); static PyObject * -_curses_window_insstr(PyCursesWindowObject *self, 
PyObject *args) +_curses_window_insstr(PyObject *self, PyObject *args) { PyObject *return_value = NULL; int group_left_1 = 0; @@ -1158,7 +1158,7 @@ _curses_window_insstr(PyCursesWindowObject *self, PyObject *args) PyErr_SetString(PyExc_TypeError, "_curses.window.insstr requires 1 to 4 arguments"); goto exit; } - return_value = _curses_window_insstr_impl(self, group_left_1, y, x, str, group_right_1, attr); + return_value = _curses_window_insstr_impl((PyCursesWindowObject *)self, group_left_1, y, x, str, group_right_1, attr); exit: return return_value; @@ -1195,7 +1195,7 @@ _curses_window_insnstr_impl(PyCursesWindowObject *self, int group_left_1, int group_right_1, long attr); static PyObject * -_curses_window_insnstr(PyCursesWindowObject *self, PyObject *args) +_curses_window_insnstr(PyObject *self, PyObject *args) { PyObject *return_value = NULL; int group_left_1 = 0; @@ -1235,7 +1235,7 @@ _curses_window_insnstr(PyCursesWindowObject *self, PyObject *args) PyErr_SetString(PyExc_TypeError, "_curses.window.insnstr requires 2 to 5 arguments"); goto exit; } - return_value = _curses_window_insnstr_impl(self, group_left_1, y, x, str, n, group_right_1, attr); + return_value = _curses_window_insnstr_impl((PyCursesWindowObject *)self, group_left_1, y, x, str, n, group_right_1, attr); exit: return return_value; @@ -1259,7 +1259,7 @@ static PyObject * _curses_window_is_linetouched_impl(PyCursesWindowObject *self, int line); static PyObject * -_curses_window_is_linetouched(PyCursesWindowObject *self, PyObject *arg) +_curses_window_is_linetouched(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; int line; @@ -1268,7 +1268,7 @@ _curses_window_is_linetouched(PyCursesWindowObject *self, PyObject *arg) if (line == -1 && PyErr_Occurred()) { goto exit; } - return_value = _curses_window_is_linetouched_impl(self, line); + return_value = _curses_window_is_linetouched_impl((PyCursesWindowObject *)self, line); exit: return return_value; @@ -1294,7 +1294,7 @@ _curses_window_noutrefresh_impl(PyCursesWindowObject *self, int smaxcol); static PyObject * -_curses_window_noutrefresh(PyCursesWindowObject *self, PyObject *args) +_curses_window_noutrefresh(PyObject *self, PyObject *args) { PyObject *return_value = NULL; int group_right_1 = 0; @@ -1318,7 +1318,7 @@ _curses_window_noutrefresh(PyCursesWindowObject *self, PyObject *args) PyErr_SetString(PyExc_TypeError, "_curses.window.noutrefresh requires 0 to 6 arguments"); goto exit; } - return_value = _curses_window_noutrefresh_impl(self, group_right_1, pminrow, pmincol, sminrow, smincol, smaxrow, smaxcol); + return_value = _curses_window_noutrefresh_impl((PyCursesWindowObject *)self, group_right_1, pminrow, pmincol, sminrow, smincol, smaxrow, smaxcol); exit: return return_value; @@ -1345,9 +1345,9 @@ static PyObject * _curses_window_noutrefresh_impl(PyCursesWindowObject *self); static PyObject * -_curses_window_noutrefresh(PyCursesWindowObject *self, PyObject *Py_UNUSED(ignored)) +_curses_window_noutrefresh(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _curses_window_noutrefresh_impl(self); + return _curses_window_noutrefresh_impl((PyCursesWindowObject *)self); } #endif /* !defined(py_is_pad) */ @@ -1375,7 +1375,7 @@ _curses_window_overlay_impl(PyCursesWindowObject *self, int dmincol, int dmaxrow, int dmaxcol); static PyObject * -_curses_window_overlay(PyCursesWindowObject *self, PyObject *args) +_curses_window_overlay(PyObject *self, PyObject *args) { PyObject *return_value = NULL; PyCursesWindowObject *destwin; @@ -1403,7 +1403,7 @@ 
_curses_window_overlay(PyCursesWindowObject *self, PyObject *args) PyErr_SetString(PyExc_TypeError, "_curses.window.overlay requires 1 to 7 arguments"); goto exit; } - return_value = _curses_window_overlay_impl(self, destwin, group_right_1, sminrow, smincol, dminrow, dmincol, dmaxrow, dmaxcol); + return_value = _curses_window_overlay_impl((PyCursesWindowObject *)self, destwin, group_right_1, sminrow, smincol, dminrow, dmincol, dmaxrow, dmaxcol); exit: return return_value; @@ -1434,7 +1434,7 @@ _curses_window_overwrite_impl(PyCursesWindowObject *self, int dmaxcol); static PyObject * -_curses_window_overwrite(PyCursesWindowObject *self, PyObject *args) +_curses_window_overwrite(PyObject *self, PyObject *args) { PyObject *return_value = NULL; PyCursesWindowObject *destwin; @@ -1462,7 +1462,7 @@ _curses_window_overwrite(PyCursesWindowObject *self, PyObject *args) PyErr_SetString(PyExc_TypeError, "_curses.window.overwrite requires 1 to 7 arguments"); goto exit; } - return_value = _curses_window_overwrite_impl(self, destwin, group_right_1, sminrow, smincol, dminrow, dmincol, dmaxrow, dmaxcol); + return_value = _curses_window_overwrite_impl((PyCursesWindowObject *)self, destwin, group_right_1, sminrow, smincol, dminrow, dmincol, dmaxrow, dmaxcol); exit: return return_value; @@ -1499,7 +1499,7 @@ static PyObject * _curses_window_redrawln_impl(PyCursesWindowObject *self, int beg, int num); static PyObject * -_curses_window_redrawln(PyCursesWindowObject *self, PyObject *const *args, Py_ssize_t nargs) +_curses_window_redrawln(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; int beg; @@ -1516,7 +1516,7 @@ _curses_window_redrawln(PyCursesWindowObject *self, PyObject *const *args, Py_ss if (num == -1 && PyErr_Occurred()) { goto exit; } - return_value = _curses_window_redrawln_impl(self, beg, num); + return_value = _curses_window_redrawln_impl((PyCursesWindowObject *)self, beg, num); exit: return return_value; @@ -1547,7 +1547,7 @@ _curses_window_refresh_impl(PyCursesWindowObject *self, int group_right_1, int smincol, int smaxrow, int smaxcol); static PyObject * -_curses_window_refresh(PyCursesWindowObject *self, PyObject *args) +_curses_window_refresh(PyObject *self, PyObject *args) { PyObject *return_value = NULL; int group_right_1 = 0; @@ -1571,7 +1571,7 @@ _curses_window_refresh(PyCursesWindowObject *self, PyObject *args) PyErr_SetString(PyExc_TypeError, "_curses.window.refresh requires 0 to 6 arguments"); goto exit; } - return_value = _curses_window_refresh_impl(self, group_right_1, pminrow, pmincol, sminrow, smincol, smaxrow, smaxcol); + return_value = _curses_window_refresh_impl((PyCursesWindowObject *)self, group_right_1, pminrow, pmincol, sminrow, smincol, smaxrow, smaxcol); exit: return return_value; @@ -1598,7 +1598,7 @@ _curses_window_setscrreg_impl(PyCursesWindowObject *self, int top, int bottom); static PyObject * -_curses_window_setscrreg(PyCursesWindowObject *self, PyObject *const *args, Py_ssize_t nargs) +_curses_window_setscrreg(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; int top; @@ -1615,7 +1615,7 @@ _curses_window_setscrreg(PyCursesWindowObject *self, PyObject *const *args, Py_s if (bottom == -1 && PyErr_Occurred()) { goto exit; } - return_value = _curses_window_setscrreg_impl(self, top, bottom); + return_value = _curses_window_setscrreg_impl((PyCursesWindowObject *)self, top, bottom); exit: return return_value; @@ -1645,7 +1645,7 @@ _curses_window_subwin_impl(PyCursesWindowObject *self, int 
group_left_1, int nlines, int ncols, int begin_y, int begin_x); static PyObject * -_curses_window_subwin(PyCursesWindowObject *self, PyObject *args) +_curses_window_subwin(PyObject *self, PyObject *args) { PyObject *return_value = NULL; int group_left_1 = 0; @@ -1670,7 +1670,7 @@ _curses_window_subwin(PyCursesWindowObject *self, PyObject *args) PyErr_SetString(PyExc_TypeError, "_curses.window.subwin requires 2 to 4 arguments"); goto exit; } - return_value = _curses_window_subwin_impl(self, group_left_1, nlines, ncols, begin_y, begin_x); + return_value = _curses_window_subwin_impl((PyCursesWindowObject *)self, group_left_1, nlines, ncols, begin_y, begin_x); exit: return return_value; @@ -1693,7 +1693,7 @@ _curses_window_scroll_impl(PyCursesWindowObject *self, int group_right_1, int lines); static PyObject * -_curses_window_scroll(PyCursesWindowObject *self, PyObject *args) +_curses_window_scroll(PyObject *self, PyObject *args) { PyObject *return_value = NULL; int group_right_1 = 0; @@ -1712,7 +1712,7 @@ _curses_window_scroll(PyCursesWindowObject *self, PyObject *args) PyErr_SetString(PyExc_TypeError, "_curses.window.scroll requires 0 to 1 arguments"); goto exit; } - return_value = _curses_window_scroll_impl(self, group_right_1, lines); + return_value = _curses_window_scroll_impl((PyCursesWindowObject *)self, group_right_1, lines); exit: return return_value; @@ -1733,7 +1733,7 @@ _curses_window_touchline_impl(PyCursesWindowObject *self, int start, int count, int group_right_1, int changed); static PyObject * -_curses_window_touchline(PyCursesWindowObject *self, PyObject *args) +_curses_window_touchline(PyObject *self, PyObject *args) { PyObject *return_value = NULL; int start; @@ -1757,7 +1757,7 @@ _curses_window_touchline(PyCursesWindowObject *self, PyObject *args) PyErr_SetString(PyExc_TypeError, "_curses.window.touchline requires 2 to 3 arguments"); goto exit; } - return_value = _curses_window_touchline_impl(self, start, count, group_right_1, changed); + return_value = _curses_window_touchline_impl((PyCursesWindowObject *)self, start, count, group_right_1, changed); exit: return return_value; @@ -1787,7 +1787,7 @@ _curses_window_vline_impl(PyCursesWindowObject *self, int group_left_1, int group_right_1, long attr); static PyObject * -_curses_window_vline(PyCursesWindowObject *self, PyObject *args) +_curses_window_vline(PyObject *self, PyObject *args) { PyObject *return_value = NULL; int group_left_1 = 0; @@ -1827,7 +1827,7 @@ _curses_window_vline(PyCursesWindowObject *self, PyObject *args) PyErr_SetString(PyExc_TypeError, "_curses.window.vline requires 2 to 5 arguments"); goto exit; } - return_value = _curses_window_vline_impl(self, group_left_1, y, x, ch, n, group_right_1, attr); + return_value = _curses_window_vline_impl((PyCursesWindowObject *)self, group_left_1, y, x, ch, n, group_right_1, attr); exit: return return_value; @@ -4379,4 +4379,4 @@ _curses_has_extended_color_support(PyObject *module, PyObject *Py_UNUSED(ignored #ifndef _CURSES_USE_DEFAULT_COLORS_METHODDEF #define _CURSES_USE_DEFAULT_COLORS_METHODDEF #endif /* !defined(_CURSES_USE_DEFAULT_COLORS_METHODDEF) */ -/*[clinic end generated code: output=26fe38c09ff8ca44 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=c4211865ed96c2af input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_datetimemodule.c.h b/Modules/clinic/_datetimemodule.c.h index 72c230fc8aee68..8f33c9e7d4eae5 100644 --- a/Modules/clinic/_datetimemodule.c.h +++ b/Modules/clinic/_datetimemodule.c.h @@ -97,7 +97,7 @@ 
datetime_date_replace_impl(PyDateTime_Date *self, int year, int month, int day); static PyObject * -datetime_date_replace(PyDateTime_Date *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +datetime_date_replace(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -162,7 +162,7 @@ datetime_date_replace(PyDateTime_Date *self, PyObject *const *args, Py_ssize_t n goto exit; } skip_optional_pos: - return_value = datetime_date_replace_impl(self, year, month, day); + return_value = datetime_date_replace_impl((PyDateTime_Date *)self, year, month, day); exit: return return_value; @@ -184,7 +184,7 @@ datetime_time_replace_impl(PyDateTime_Time *self, int hour, int minute, int fold); static PyObject * -datetime_time_replace(PyDateTime_Time *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +datetime_time_replace(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -218,7 +218,7 @@ datetime_time_replace(PyDateTime_Time *self, PyObject *const *args, Py_ssize_t n int minute = TIME_GET_MINUTE(self); int second = TIME_GET_SECOND(self); int microsecond = TIME_GET_MICROSECOND(self); - PyObject *tzinfo = HASTZINFO(self) ? self->tzinfo : Py_None; + PyObject *tzinfo = HASTZINFO(self) ? ((PyDateTime_Time *)self)->tzinfo : Py_None; int fold = TIME_GET_FOLD(self); args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, @@ -280,7 +280,7 @@ datetime_time_replace(PyDateTime_Time *self, PyObject *const *args, Py_ssize_t n goto exit; } skip_optional_kwonly: - return_value = datetime_time_replace_impl(self, hour, minute, second, microsecond, tzinfo, fold); + return_value = datetime_time_replace_impl((PyDateTime_Time *)self, hour, minute, second, microsecond, tzinfo, fold); exit: return return_value; @@ -370,7 +370,7 @@ datetime_datetime_replace_impl(PyDateTime_DateTime *self, int year, int fold); static PyObject * -datetime_datetime_replace(PyDateTime_DateTime *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +datetime_datetime_replace(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -407,7 +407,7 @@ datetime_datetime_replace(PyDateTime_DateTime *self, PyObject *const *args, Py_s int minute = DATE_GET_MINUTE(self); int second = DATE_GET_SECOND(self); int microsecond = DATE_GET_MICROSECOND(self); - PyObject *tzinfo = HASTZINFO(self) ? self->tzinfo : Py_None; + PyObject *tzinfo = HASTZINFO(self) ? 
((PyDateTime_DateTime *)self)->tzinfo : Py_None; int fold = DATE_GET_FOLD(self); args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, @@ -496,9 +496,9 @@ datetime_datetime_replace(PyDateTime_DateTime *self, PyObject *const *args, Py_s goto exit; } skip_optional_kwonly: - return_value = datetime_datetime_replace_impl(self, year, month, day, hour, minute, second, microsecond, tzinfo, fold); + return_value = datetime_datetime_replace_impl((PyDateTime_DateTime *)self, year, month, day, hour, minute, second, microsecond, tzinfo, fold); exit: return return_value; } -/*[clinic end generated code: output=203217a61ea14171 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=8acf62fbc7328f79 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_dbmmodule.c.h b/Modules/clinic/_dbmmodule.c.h index 4379b433db3738..5e503194408776 100644 --- a/Modules/clinic/_dbmmodule.c.h +++ b/Modules/clinic/_dbmmodule.c.h @@ -20,9 +20,9 @@ static PyObject * _dbm_dbm_close_impl(dbmobject *self); static PyObject * -_dbm_dbm_close(dbmobject *self, PyObject *Py_UNUSED(ignored)) +_dbm_dbm_close(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _dbm_dbm_close_impl(self); + return _dbm_dbm_close_impl((dbmobject *)self); } PyDoc_STRVAR(_dbm_dbm_keys__doc__, @@ -38,13 +38,13 @@ static PyObject * _dbm_dbm_keys_impl(dbmobject *self, PyTypeObject *cls); static PyObject * -_dbm_dbm_keys(dbmobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_dbm_dbm_keys(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "keys() takes no arguments"); return NULL; } - return _dbm_dbm_keys_impl(self, cls); + return _dbm_dbm_keys_impl((dbmobject *)self, cls); } PyDoc_STRVAR(_dbm_dbm_get__doc__, @@ -61,7 +61,7 @@ _dbm_dbm_get_impl(dbmobject *self, PyTypeObject *cls, const char *key, Py_ssize_t key_length, PyObject *default_value); static PyObject * -_dbm_dbm_get(dbmobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_dbm_dbm_get(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -85,7 +85,7 @@ _dbm_dbm_get(dbmobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize &key, &key_length, &default_value)) { goto exit; } - return_value = _dbm_dbm_get_impl(self, cls, key, key_length, default_value); + return_value = _dbm_dbm_get_impl((dbmobject *)self, cls, key, key_length, default_value); exit: return return_value; @@ -107,7 +107,7 @@ _dbm_dbm_setdefault_impl(dbmobject *self, PyTypeObject *cls, const char *key, Py_ssize_t key_length, PyObject *default_value); static PyObject * -_dbm_dbm_setdefault(dbmobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_dbm_dbm_setdefault(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -131,7 +131,7 @@ _dbm_dbm_setdefault(dbmobject *self, PyTypeObject *cls, PyObject *const *args, P &key, &key_length, &default_value)) { goto exit; } - return_value = _dbm_dbm_setdefault_impl(self, cls, key, key_length, default_value); + return_value = _dbm_dbm_setdefault_impl((dbmobject *)self, cls, key, key_length, default_value); exit: return return_value; @@ -150,13 
+150,13 @@ static PyObject * _dbm_dbm_clear_impl(dbmobject *self, PyTypeObject *cls); static PyObject * -_dbm_dbm_clear(dbmobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_dbm_dbm_clear(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "clear() takes no arguments"); return NULL; } - return _dbm_dbm_clear_impl(self, cls); + return _dbm_dbm_clear_impl((dbmobject *)self, cls); } PyDoc_STRVAR(dbmopen__doc__, @@ -221,4 +221,4 @@ dbmopen(PyObject *module, PyObject *const *args, Py_ssize_t nargs) exit: return return_value; } -/*[clinic end generated code: output=f7d9a87d80a64278 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=3b456118f231b160 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_elementtree.c.h b/Modules/clinic/_elementtree.c.h index 07045e72040664..78391887b615cf 100644 --- a/Modules/clinic/_elementtree.c.h +++ b/Modules/clinic/_elementtree.c.h @@ -22,7 +22,7 @@ _elementtree_Element_append_impl(ElementObject *self, PyTypeObject *cls, PyObject *subelement); static PyObject * -_elementtree_Element_append(ElementObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_elementtree_Element_append(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -51,7 +51,7 @@ _elementtree_Element_append(ElementObject *self, PyTypeObject *cls, PyObject *co goto exit; } subelement = args[0]; - return_value = _elementtree_Element_append_impl(self, cls, subelement); + return_value = _elementtree_Element_append_impl((ElementObject *)self, cls, subelement); exit: return return_value; @@ -69,9 +69,9 @@ static PyObject * _elementtree_Element_clear_impl(ElementObject *self); static PyObject * -_elementtree_Element_clear(ElementObject *self, PyObject *Py_UNUSED(ignored)) +_elementtree_Element_clear(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _elementtree_Element_clear_impl(self); + return _elementtree_Element_clear_impl((ElementObject *)self); } PyDoc_STRVAR(_elementtree_Element___copy____doc__, @@ -86,13 +86,13 @@ static PyObject * _elementtree_Element___copy___impl(ElementObject *self, PyTypeObject *cls); static PyObject * -_elementtree_Element___copy__(ElementObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_elementtree_Element___copy__(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "__copy__() takes no arguments"); return NULL; } - return _elementtree_Element___copy___impl(self, cls); + return _elementtree_Element___copy___impl((ElementObject *)self, cls); } PyDoc_STRVAR(_elementtree_Element___deepcopy____doc__, @@ -107,7 +107,7 @@ static PyObject * _elementtree_Element___deepcopy___impl(ElementObject *self, PyObject *memo); static PyObject * -_elementtree_Element___deepcopy__(ElementObject *self, PyObject *arg) +_elementtree_Element___deepcopy__(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; PyObject *memo; @@ -117,7 +117,7 @@ _elementtree_Element___deepcopy__(ElementObject *self, PyObject *arg) goto exit; } memo = arg; - return_value = _elementtree_Element___deepcopy___impl(self, memo); + return_value = 
_elementtree_Element___deepcopy___impl((ElementObject *)self, memo); exit: return return_value; @@ -135,12 +135,12 @@ static size_t _elementtree_Element___sizeof___impl(ElementObject *self); static PyObject * -_elementtree_Element___sizeof__(ElementObject *self, PyObject *Py_UNUSED(ignored)) +_elementtree_Element___sizeof__(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; size_t _return_value; - _return_value = _elementtree_Element___sizeof___impl(self); + _return_value = _elementtree_Element___sizeof___impl((ElementObject *)self); if ((_return_value == (size_t)-1) && PyErr_Occurred()) { goto exit; } @@ -162,9 +162,9 @@ static PyObject * _elementtree_Element___getstate___impl(ElementObject *self); static PyObject * -_elementtree_Element___getstate__(ElementObject *self, PyObject *Py_UNUSED(ignored)) +_elementtree_Element___getstate__(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _elementtree_Element___getstate___impl(self); + return _elementtree_Element___getstate___impl((ElementObject *)self); } PyDoc_STRVAR(_elementtree_Element___setstate____doc__, @@ -180,7 +180,7 @@ _elementtree_Element___setstate___impl(ElementObject *self, PyTypeObject *cls, PyObject *state); static PyObject * -_elementtree_Element___setstate__(ElementObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_elementtree_Element___setstate__(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -205,7 +205,7 @@ _elementtree_Element___setstate__(ElementObject *self, PyTypeObject *cls, PyObje goto exit; } state = args[0]; - return_value = _elementtree_Element___setstate___impl(self, cls, state); + return_value = _elementtree_Element___setstate___impl((ElementObject *)self, cls, state); exit: return return_value; @@ -224,7 +224,7 @@ _elementtree_Element_extend_impl(ElementObject *self, PyTypeObject *cls, PyObject *elements); static PyObject * -_elementtree_Element_extend(ElementObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_elementtree_Element_extend(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -249,7 +249,7 @@ _elementtree_Element_extend(ElementObject *self, PyTypeObject *cls, PyObject *co goto exit; } elements = args[0]; - return_value = _elementtree_Element_extend_impl(self, cls, elements); + return_value = _elementtree_Element_extend_impl((ElementObject *)self, cls, elements); exit: return return_value; @@ -268,7 +268,7 @@ _elementtree_Element_find_impl(ElementObject *self, PyTypeObject *cls, PyObject *path, PyObject *namespaces); static PyObject * -_elementtree_Element_find(ElementObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_elementtree_Element_find(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -312,7 +312,7 @@ _elementtree_Element_find(ElementObject *self, PyTypeObject *cls, PyObject *cons } namespaces = args[1]; skip_optional_pos: - return_value = _elementtree_Element_find_impl(self, cls, path, namespaces); + return_value = _elementtree_Element_find_impl((ElementObject *)self, cls, path, namespaces); exit: return 
return_value; @@ -332,7 +332,7 @@ _elementtree_Element_findtext_impl(ElementObject *self, PyTypeObject *cls, PyObject *namespaces); static PyObject * -_elementtree_Element_findtext(ElementObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_elementtree_Element_findtext(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -383,7 +383,7 @@ _elementtree_Element_findtext(ElementObject *self, PyTypeObject *cls, PyObject * } namespaces = args[2]; skip_optional_pos: - return_value = _elementtree_Element_findtext_impl(self, cls, path, default_value, namespaces); + return_value = _elementtree_Element_findtext_impl((ElementObject *)self, cls, path, default_value, namespaces); exit: return return_value; @@ -402,7 +402,7 @@ _elementtree_Element_findall_impl(ElementObject *self, PyTypeObject *cls, PyObject *path, PyObject *namespaces); static PyObject * -_elementtree_Element_findall(ElementObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_elementtree_Element_findall(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -446,7 +446,7 @@ _elementtree_Element_findall(ElementObject *self, PyTypeObject *cls, PyObject *c } namespaces = args[1]; skip_optional_pos: - return_value = _elementtree_Element_findall_impl(self, cls, path, namespaces); + return_value = _elementtree_Element_findall_impl((ElementObject *)self, cls, path, namespaces); exit: return return_value; @@ -465,7 +465,7 @@ _elementtree_Element_iterfind_impl(ElementObject *self, PyTypeObject *cls, PyObject *path, PyObject *namespaces); static PyObject * -_elementtree_Element_iterfind(ElementObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_elementtree_Element_iterfind(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -509,7 +509,7 @@ _elementtree_Element_iterfind(ElementObject *self, PyTypeObject *cls, PyObject * } namespaces = args[1]; skip_optional_pos: - return_value = _elementtree_Element_iterfind_impl(self, cls, path, namespaces); + return_value = _elementtree_Element_iterfind_impl((ElementObject *)self, cls, path, namespaces); exit: return return_value; @@ -528,7 +528,7 @@ _elementtree_Element_get_impl(ElementObject *self, PyObject *key, PyObject *default_value); static PyObject * -_elementtree_Element_get(ElementObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_elementtree_Element_get(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -572,7 +572,7 @@ _elementtree_Element_get(ElementObject *self, PyObject *const *args, Py_ssize_t } default_value = args[1]; skip_optional_pos: - return_value = _elementtree_Element_get_impl(self, key, default_value); + return_value = _elementtree_Element_get_impl((ElementObject *)self, key, default_value); exit: return return_value; @@ -591,7 +591,7 @@ _elementtree_Element_iter_impl(ElementObject *self, PyTypeObject *cls, PyObject *tag); static PyObject * -_elementtree_Element_iter(ElementObject *self, PyTypeObject *cls, 
PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_elementtree_Element_iter(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -633,7 +633,7 @@ _elementtree_Element_iter(ElementObject *self, PyTypeObject *cls, PyObject *cons } tag = args[0]; skip_optional_pos: - return_value = _elementtree_Element_iter_impl(self, cls, tag); + return_value = _elementtree_Element_iter_impl((ElementObject *)self, cls, tag); exit: return return_value; @@ -651,13 +651,13 @@ static PyObject * _elementtree_Element_itertext_impl(ElementObject *self, PyTypeObject *cls); static PyObject * -_elementtree_Element_itertext(ElementObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_elementtree_Element_itertext(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "itertext() takes no arguments"); return NULL; } - return _elementtree_Element_itertext_impl(self, cls); + return _elementtree_Element_itertext_impl((ElementObject *)self, cls); } PyDoc_STRVAR(_elementtree_Element_insert__doc__, @@ -673,7 +673,7 @@ _elementtree_Element_insert_impl(ElementObject *self, Py_ssize_t index, PyObject *subelement); static PyObject * -_elementtree_Element_insert(ElementObject *self, PyObject *const *args, Py_ssize_t nargs) +_elementtree_Element_insert(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; Py_ssize_t index; @@ -699,7 +699,7 @@ _elementtree_Element_insert(ElementObject *self, PyObject *const *args, Py_ssize goto exit; } subelement = args[1]; - return_value = _elementtree_Element_insert_impl(self, index, subelement); + return_value = _elementtree_Element_insert_impl((ElementObject *)self, index, subelement); exit: return return_value; @@ -717,9 +717,9 @@ static PyObject * _elementtree_Element_items_impl(ElementObject *self); static PyObject * -_elementtree_Element_items(ElementObject *self, PyObject *Py_UNUSED(ignored)) +_elementtree_Element_items(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _elementtree_Element_items_impl(self); + return _elementtree_Element_items_impl((ElementObject *)self); } PyDoc_STRVAR(_elementtree_Element_keys__doc__, @@ -734,9 +734,9 @@ static PyObject * _elementtree_Element_keys_impl(ElementObject *self); static PyObject * -_elementtree_Element_keys(ElementObject *self, PyObject *Py_UNUSED(ignored)) +_elementtree_Element_keys(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _elementtree_Element_keys_impl(self); + return _elementtree_Element_keys_impl((ElementObject *)self); } PyDoc_STRVAR(_elementtree_Element_makeelement__doc__, @@ -752,7 +752,7 @@ _elementtree_Element_makeelement_impl(ElementObject *self, PyTypeObject *cls, PyObject *tag, PyObject *attrib); static PyObject * -_elementtree_Element_makeelement(ElementObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_elementtree_Element_makeelement(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -783,7 +783,7 @@ _elementtree_Element_makeelement(ElementObject *self, PyTypeObject *cls, PyObjec goto exit; } attrib = args[1]; - return_value = _elementtree_Element_makeelement_impl(self, cls, 
tag, attrib); + return_value = _elementtree_Element_makeelement_impl((ElementObject *)self, cls, tag, attrib); exit: return return_value; @@ -801,7 +801,7 @@ static PyObject * _elementtree_Element_remove_impl(ElementObject *self, PyObject *subelement); static PyObject * -_elementtree_Element_remove(ElementObject *self, PyObject *arg) +_elementtree_Element_remove(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; PyObject *subelement; @@ -811,7 +811,7 @@ _elementtree_Element_remove(ElementObject *self, PyObject *arg) goto exit; } subelement = arg; - return_value = _elementtree_Element_remove_impl(self, subelement); + return_value = _elementtree_Element_remove_impl((ElementObject *)self, subelement); exit: return return_value; @@ -830,7 +830,7 @@ _elementtree_Element_set_impl(ElementObject *self, PyObject *key, PyObject *value); static PyObject * -_elementtree_Element_set(ElementObject *self, PyObject *const *args, Py_ssize_t nargs) +_elementtree_Element_set(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *key; @@ -841,7 +841,7 @@ _elementtree_Element_set(ElementObject *self, PyObject *const *args, Py_ssize_t } key = args[0]; value = args[1]; - return_value = _elementtree_Element_set_impl(self, key, value); + return_value = _elementtree_Element_set_impl((ElementObject *)self, key, value); exit: return return_value; @@ -1013,7 +1013,7 @@ _elementtree_TreeBuilder_pi_impl(TreeBuilderObject *self, PyObject *target, PyObject *text); static PyObject * -_elementtree_TreeBuilder_pi(TreeBuilderObject *self, PyObject *const *args, Py_ssize_t nargs) +_elementtree_TreeBuilder_pi(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *target; @@ -1028,7 +1028,7 @@ _elementtree_TreeBuilder_pi(TreeBuilderObject *self, PyObject *const *args, Py_s } text = args[1]; skip_optional: - return_value = _elementtree_TreeBuilder_pi_impl(self, target, text); + return_value = _elementtree_TreeBuilder_pi_impl((TreeBuilderObject *)self, target, text); exit: return return_value; @@ -1046,9 +1046,9 @@ static PyObject * _elementtree_TreeBuilder_close_impl(TreeBuilderObject *self); static PyObject * -_elementtree_TreeBuilder_close(TreeBuilderObject *self, PyObject *Py_UNUSED(ignored)) +_elementtree_TreeBuilder_close(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _elementtree_TreeBuilder_close_impl(self); + return _elementtree_TreeBuilder_close_impl((TreeBuilderObject *)self); } PyDoc_STRVAR(_elementtree_TreeBuilder_start__doc__, @@ -1064,7 +1064,7 @@ _elementtree_TreeBuilder_start_impl(TreeBuilderObject *self, PyObject *tag, PyObject *attrs); static PyObject * -_elementtree_TreeBuilder_start(TreeBuilderObject *self, PyObject *const *args, Py_ssize_t nargs) +_elementtree_TreeBuilder_start(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *tag; @@ -1079,7 +1079,7 @@ _elementtree_TreeBuilder_start(TreeBuilderObject *self, PyObject *const *args, P goto exit; } attrs = args[1]; - return_value = _elementtree_TreeBuilder_start_impl(self, tag, attrs); + return_value = _elementtree_TreeBuilder_start_impl((TreeBuilderObject *)self, tag, attrs); exit: return return_value; @@ -1176,9 +1176,9 @@ static PyObject * _elementtree_XMLParser_close_impl(XMLParserObject *self); static PyObject * -_elementtree_XMLParser_close(XMLParserObject *self, PyObject *Py_UNUSED(ignored)) +_elementtree_XMLParser_close(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return 
_elementtree_XMLParser_close_impl(self); + return _elementtree_XMLParser_close_impl((XMLParserObject *)self); } PyDoc_STRVAR(_elementtree_XMLParser_flush__doc__, @@ -1193,9 +1193,9 @@ static PyObject * _elementtree_XMLParser_flush_impl(XMLParserObject *self); static PyObject * -_elementtree_XMLParser_flush(XMLParserObject *self, PyObject *Py_UNUSED(ignored)) +_elementtree_XMLParser_flush(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _elementtree_XMLParser_flush_impl(self); + return _elementtree_XMLParser_flush_impl((XMLParserObject *)self); } PyDoc_STRVAR(_elementtree_XMLParser_feed__doc__, @@ -1228,7 +1228,7 @@ _elementtree_XMLParser__setevents_impl(XMLParserObject *self, PyObject *events_to_report); static PyObject * -_elementtree_XMLParser__setevents(XMLParserObject *self, PyObject *const *args, Py_ssize_t nargs) +_elementtree_XMLParser__setevents(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *events_queue; @@ -1243,9 +1243,9 @@ _elementtree_XMLParser__setevents(XMLParserObject *self, PyObject *const *args, } events_to_report = args[1]; skip_optional: - return_value = _elementtree_XMLParser__setevents_impl(self, events_queue, events_to_report); + return_value = _elementtree_XMLParser__setevents_impl((XMLParserObject *)self, events_queue, events_to_report); exit: return return_value; } -/*[clinic end generated code: output=b713bf59fd0fef9b input=a9049054013a1b77]*/ +/*[clinic end generated code: output=e5c758958f14f102 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_gdbmmodule.c.h b/Modules/clinic/_gdbmmodule.c.h index bbf4365114c0aa..00950f18e53541 100644 --- a/Modules/clinic/_gdbmmodule.c.h +++ b/Modules/clinic/_gdbmmodule.c.h @@ -20,7 +20,7 @@ static PyObject * _gdbm_gdbm_get_impl(gdbmobject *self, PyObject *key, PyObject *default_value); static PyObject * -_gdbm_gdbm_get(gdbmobject *self, PyObject *const *args, Py_ssize_t nargs) +_gdbm_gdbm_get(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *key; @@ -35,7 +35,7 @@ _gdbm_gdbm_get(gdbmobject *self, PyObject *const *args, Py_ssize_t nargs) } default_value = args[1]; skip_optional: - return_value = _gdbm_gdbm_get_impl(self, key, default_value); + return_value = _gdbm_gdbm_get_impl((gdbmobject *)self, key, default_value); exit: return return_value; @@ -55,7 +55,7 @@ _gdbm_gdbm_setdefault_impl(gdbmobject *self, PyObject *key, PyObject *default_value); static PyObject * -_gdbm_gdbm_setdefault(gdbmobject *self, PyObject *const *args, Py_ssize_t nargs) +_gdbm_gdbm_setdefault(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *key; @@ -70,7 +70,7 @@ _gdbm_gdbm_setdefault(gdbmobject *self, PyObject *const *args, Py_ssize_t nargs) } default_value = args[1]; skip_optional: - return_value = _gdbm_gdbm_setdefault_impl(self, key, default_value); + return_value = _gdbm_gdbm_setdefault_impl((gdbmobject *)self, key, default_value); exit: return return_value; @@ -89,9 +89,9 @@ static PyObject * _gdbm_gdbm_close_impl(gdbmobject *self); static PyObject * -_gdbm_gdbm_close(gdbmobject *self, PyObject *Py_UNUSED(ignored)) +_gdbm_gdbm_close(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _gdbm_gdbm_close_impl(self); + return _gdbm_gdbm_close_impl((gdbmobject *)self); } PyDoc_STRVAR(_gdbm_gdbm_keys__doc__, @@ -107,13 +107,13 @@ static PyObject * _gdbm_gdbm_keys_impl(gdbmobject *self, PyTypeObject *cls); static PyObject * -_gdbm_gdbm_keys(gdbmobject *self, PyTypeObject *cls, PyObject 
*const *args, Py_ssize_t nargs, PyObject *kwnames) +_gdbm_gdbm_keys(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "keys() takes no arguments"); return NULL; } - return _gdbm_gdbm_keys_impl(self, cls); + return _gdbm_gdbm_keys_impl((gdbmobject *)self, cls); } PyDoc_STRVAR(_gdbm_gdbm_firstkey__doc__, @@ -133,13 +133,13 @@ static PyObject * _gdbm_gdbm_firstkey_impl(gdbmobject *self, PyTypeObject *cls); static PyObject * -_gdbm_gdbm_firstkey(gdbmobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_gdbm_gdbm_firstkey(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "firstkey() takes no arguments"); return NULL; } - return _gdbm_gdbm_firstkey_impl(self, cls); + return _gdbm_gdbm_firstkey_impl((gdbmobject *)self, cls); } PyDoc_STRVAR(_gdbm_gdbm_nextkey__doc__, @@ -164,7 +164,7 @@ _gdbm_gdbm_nextkey_impl(gdbmobject *self, PyTypeObject *cls, const char *key, Py_ssize_t key_length); static PyObject * -_gdbm_gdbm_nextkey(gdbmobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_gdbm_gdbm_nextkey(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -187,7 +187,7 @@ _gdbm_gdbm_nextkey(gdbmobject *self, PyTypeObject *cls, PyObject *const *args, P &key, &key_length)) { goto exit; } - return_value = _gdbm_gdbm_nextkey_impl(self, cls, key, key_length); + return_value = _gdbm_gdbm_nextkey_impl((gdbmobject *)self, cls, key, key_length); exit: return return_value; @@ -212,13 +212,13 @@ static PyObject * _gdbm_gdbm_reorganize_impl(gdbmobject *self, PyTypeObject *cls); static PyObject * -_gdbm_gdbm_reorganize(gdbmobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_gdbm_gdbm_reorganize(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "reorganize() takes no arguments"); return NULL; } - return _gdbm_gdbm_reorganize_impl(self, cls); + return _gdbm_gdbm_reorganize_impl((gdbmobject *)self, cls); } PyDoc_STRVAR(_gdbm_gdbm_sync__doc__, @@ -237,13 +237,13 @@ static PyObject * _gdbm_gdbm_sync_impl(gdbmobject *self, PyTypeObject *cls); static PyObject * -_gdbm_gdbm_sync(gdbmobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_gdbm_gdbm_sync(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "sync() takes no arguments"); return NULL; } - return _gdbm_gdbm_sync_impl(self, cls); + return _gdbm_gdbm_sync_impl((gdbmobject *)self, cls); } PyDoc_STRVAR(_gdbm_gdbm_clear__doc__, @@ -259,13 +259,13 @@ static PyObject * _gdbm_gdbm_clear_impl(gdbmobject *self, PyTypeObject *cls); static PyObject * -_gdbm_gdbm_clear(gdbmobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_gdbm_gdbm_clear(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { 
PyErr_SetString(PyExc_TypeError, "clear() takes no arguments"); return NULL; } - return _gdbm_gdbm_clear_impl(self, cls); + return _gdbm_gdbm_clear_impl((gdbmobject *)self, cls); } PyDoc_STRVAR(dbmopen__doc__, @@ -343,4 +343,4 @@ dbmopen(PyObject *module, PyObject *const *args, Py_ssize_t nargs) exit: return return_value; } -/*[clinic end generated code: output=07bdeb4a8ecb328e input=a9049054013a1b77]*/ +/*[clinic end generated code: output=d974cb39e4ee5d67 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_hashopenssl.c.h b/Modules/clinic/_hashopenssl.c.h index f54f065f7d2b71..d219b80b791a66 100644 --- a/Modules/clinic/_hashopenssl.c.h +++ b/Modules/clinic/_hashopenssl.c.h @@ -22,9 +22,9 @@ static PyObject * EVP_copy_impl(EVPobject *self); static PyObject * -EVP_copy(EVPobject *self, PyObject *Py_UNUSED(ignored)) +EVP_copy(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return EVP_copy_impl(self); + return EVP_copy_impl((EVPobject *)self); } PyDoc_STRVAR(EVP_digest__doc__, @@ -40,9 +40,9 @@ static PyObject * EVP_digest_impl(EVPobject *self); static PyObject * -EVP_digest(EVPobject *self, PyObject *Py_UNUSED(ignored)) +EVP_digest(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return EVP_digest_impl(self); + return EVP_digest_impl((EVPobject *)self); } PyDoc_STRVAR(EVP_hexdigest__doc__, @@ -58,9 +58,9 @@ static PyObject * EVP_hexdigest_impl(EVPobject *self); static PyObject * -EVP_hexdigest(EVPobject *self, PyObject *Py_UNUSED(ignored)) +EVP_hexdigest(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return EVP_hexdigest_impl(self); + return EVP_hexdigest_impl((EVPobject *)self); } PyDoc_STRVAR(EVP_update__doc__, @@ -87,7 +87,7 @@ static PyObject * EVPXOF_digest_impl(EVPobject *self, Py_ssize_t length); static PyObject * -EVPXOF_digest(EVPobject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +EVPXOF_digest(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -135,7 +135,7 @@ EVPXOF_digest(EVPobject *self, PyObject *const *args, Py_ssize_t nargs, PyObject } length = ival; } - return_value = EVPXOF_digest_impl(self, length); + return_value = EVPXOF_digest_impl((EVPobject *)self, length); exit: return return_value; @@ -158,7 +158,7 @@ static PyObject * EVPXOF_hexdigest_impl(EVPobject *self, Py_ssize_t length); static PyObject * -EVPXOF_hexdigest(EVPobject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +EVPXOF_hexdigest(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -206,7 +206,7 @@ EVPXOF_hexdigest(EVPobject *self, PyObject *const *args, Py_ssize_t nargs, PyObj } length = ival; } - return_value = EVPXOF_hexdigest_impl(self, length); + return_value = EVPXOF_hexdigest_impl((EVPobject *)self, length); exit: return return_value; @@ -1634,9 +1634,9 @@ static PyObject * _hashlib_HMAC_copy_impl(HMACobject *self); static PyObject * -_hashlib_HMAC_copy(HMACobject *self, PyObject *Py_UNUSED(ignored)) +_hashlib_HMAC_copy(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _hashlib_HMAC_copy_impl(self); + return _hashlib_HMAC_copy_impl((HMACobject *)self); } PyDoc_STRVAR(_hashlib_HMAC_update__doc__, @@ -1652,7 +1652,7 @@ static PyObject * _hashlib_HMAC_update_impl(HMACobject *self, PyObject *msg); static PyObject * -_hashlib_HMAC_update(HMACobject *self, PyObject *const *args, Py_ssize_t nargs, PyObject 
*kwnames) +_hashlib_HMAC_update(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -1689,7 +1689,7 @@ _hashlib_HMAC_update(HMACobject *self, PyObject *const *args, Py_ssize_t nargs, goto exit; } msg = args[0]; - return_value = _hashlib_HMAC_update_impl(self, msg); + return_value = _hashlib_HMAC_update_impl((HMACobject *)self, msg); exit: return return_value; @@ -1708,9 +1708,9 @@ static PyObject * _hashlib_HMAC_digest_impl(HMACobject *self); static PyObject * -_hashlib_HMAC_digest(HMACobject *self, PyObject *Py_UNUSED(ignored)) +_hashlib_HMAC_digest(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _hashlib_HMAC_digest_impl(self); + return _hashlib_HMAC_digest_impl((HMACobject *)self); } PyDoc_STRVAR(_hashlib_HMAC_hexdigest__doc__, @@ -1729,9 +1729,9 @@ static PyObject * _hashlib_HMAC_hexdigest_impl(HMACobject *self); static PyObject * -_hashlib_HMAC_hexdigest(HMACobject *self, PyObject *Py_UNUSED(ignored)) +_hashlib_HMAC_hexdigest(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _hashlib_HMAC_hexdigest_impl(self); + return _hashlib_HMAC_hexdigest_impl((HMACobject *)self); } PyDoc_STRVAR(_hashlib_get_fips_mode__doc__, @@ -1844,4 +1844,4 @@ _hashlib_compare_digest(PyObject *module, PyObject *const *args, Py_ssize_t narg #ifndef _HASHLIB_SCRYPT_METHODDEF #define _HASHLIB_SCRYPT_METHODDEF #endif /* !defined(_HASHLIB_SCRYPT_METHODDEF) */ -/*[clinic end generated code: output=c3ef67e4a573cc7a input=a9049054013a1b77]*/ +/*[clinic end generated code: output=811a8b50beae1018 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_lsprof.c.h b/Modules/clinic/_lsprof.c.h index e19840f97e5c69..6a75a8f9833d1e 100644 --- a/Modules/clinic/_lsprof.c.h +++ b/Modules/clinic/_lsprof.c.h @@ -43,13 +43,13 @@ static PyObject * _lsprof_Profiler_getstats_impl(ProfilerObject *self, PyTypeObject *cls); static PyObject * -_lsprof_Profiler_getstats(ProfilerObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_lsprof_Profiler_getstats(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "getstats() takes no arguments"); return NULL; } - return _lsprof_Profiler_getstats_impl(self, cls); + return _lsprof_Profiler_getstats_impl((ProfilerObject *)self, cls); } PyDoc_STRVAR(_lsprof_Profiler__pystart_callback__doc__, @@ -65,7 +65,7 @@ _lsprof_Profiler__pystart_callback_impl(ProfilerObject *self, PyObject *code, PyObject *instruction_offset); static PyObject * -_lsprof_Profiler__pystart_callback(ProfilerObject *self, PyObject *const *args, Py_ssize_t nargs) +_lsprof_Profiler__pystart_callback(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *code; @@ -76,7 +76,7 @@ _lsprof_Profiler__pystart_callback(ProfilerObject *self, PyObject *const *args, } code = args[0]; instruction_offset = args[1]; - return_value = _lsprof_Profiler__pystart_callback_impl(self, code, instruction_offset); + return_value = _lsprof_Profiler__pystart_callback_impl((ProfilerObject *)self, code, instruction_offset); exit: return return_value; @@ -97,7 +97,7 @@ _lsprof_Profiler__pyreturn_callback_impl(ProfilerObject *self, PyObject *retval); static PyObject * -_lsprof_Profiler__pyreturn_callback(ProfilerObject *self, PyObject *const *args, Py_ssize_t nargs) +_lsprof_Profiler__pyreturn_callback(PyObject 
*self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *code; @@ -110,7 +110,7 @@ _lsprof_Profiler__pyreturn_callback(ProfilerObject *self, PyObject *const *args, code = args[0]; instruction_offset = args[1]; retval = args[2]; - return_value = _lsprof_Profiler__pyreturn_callback_impl(self, code, instruction_offset, retval); + return_value = _lsprof_Profiler__pyreturn_callback_impl((ProfilerObject *)self, code, instruction_offset, retval); exit: return return_value; @@ -130,7 +130,7 @@ _lsprof_Profiler__ccall_callback_impl(ProfilerObject *self, PyObject *code, PyObject *callable, PyObject *self_arg); static PyObject * -_lsprof_Profiler__ccall_callback(ProfilerObject *self, PyObject *const *args, Py_ssize_t nargs) +_lsprof_Profiler__ccall_callback(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *code; @@ -145,7 +145,7 @@ _lsprof_Profiler__ccall_callback(ProfilerObject *self, PyObject *const *args, Py instruction_offset = args[1]; callable = args[2]; self_arg = args[3]; - return_value = _lsprof_Profiler__ccall_callback_impl(self, code, instruction_offset, callable, self_arg); + return_value = _lsprof_Profiler__ccall_callback_impl((ProfilerObject *)self, code, instruction_offset, callable, self_arg); exit: return return_value; @@ -167,7 +167,7 @@ _lsprof_Profiler__creturn_callback_impl(ProfilerObject *self, PyObject *code, PyObject *self_arg); static PyObject * -_lsprof_Profiler__creturn_callback(ProfilerObject *self, PyObject *const *args, Py_ssize_t nargs) +_lsprof_Profiler__creturn_callback(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *code; @@ -182,7 +182,7 @@ _lsprof_Profiler__creturn_callback(ProfilerObject *self, PyObject *const *args, instruction_offset = args[1]; callable = args[2]; self_arg = args[3]; - return_value = _lsprof_Profiler__creturn_callback_impl(self, code, instruction_offset, callable, self_arg); + return_value = _lsprof_Profiler__creturn_callback_impl((ProfilerObject *)self, code, instruction_offset, callable, self_arg); exit: return return_value; @@ -209,7 +209,7 @@ _lsprof_Profiler_enable_impl(ProfilerObject *self, int subcalls, int builtins); static PyObject * -_lsprof_Profiler_enable(ProfilerObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_lsprof_Profiler_enable(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -264,7 +264,7 @@ _lsprof_Profiler_enable(ProfilerObject *self, PyObject *const *args, Py_ssize_t goto exit; } skip_optional_pos: - return_value = _lsprof_Profiler_enable_impl(self, subcalls, builtins); + return_value = _lsprof_Profiler_enable_impl((ProfilerObject *)self, subcalls, builtins); exit: return return_value; @@ -283,9 +283,9 @@ static PyObject * _lsprof_Profiler_disable_impl(ProfilerObject *self); static PyObject * -_lsprof_Profiler_disable(ProfilerObject *self, PyObject *Py_UNUSED(ignored)) +_lsprof_Profiler_disable(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _lsprof_Profiler_disable_impl(self); + return _lsprof_Profiler_disable_impl((ProfilerObject *)self); } PyDoc_STRVAR(_lsprof_Profiler_clear__doc__, @@ -301,9 +301,9 @@ static PyObject * _lsprof_Profiler_clear_impl(ProfilerObject *self); static PyObject * -_lsprof_Profiler_clear(ProfilerObject *self, PyObject *Py_UNUSED(ignored)) +_lsprof_Profiler_clear(PyObject *self, PyObject *Py_UNUSED(ignored)) 
{ - return _lsprof_Profiler_clear_impl(self); + return _lsprof_Profiler_clear_impl((ProfilerObject *)self); } PyDoc_STRVAR(profiler_init__doc__, @@ -407,4 +407,4 @@ profiler_init(PyObject *self, PyObject *args, PyObject *kwargs) exit: return return_value; } -/*[clinic end generated code: output=e56d849e35d005a5 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=d983dbf23fd8ac3b input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_lzmamodule.c.h b/Modules/clinic/_lzmamodule.c.h index 187f7b183dca84..c7c81d8d1f1b9d 100644 --- a/Modules/clinic/_lzmamodule.c.h +++ b/Modules/clinic/_lzmamodule.c.h @@ -27,7 +27,7 @@ static PyObject * _lzma_LZMACompressor_compress_impl(Compressor *self, Py_buffer *data); static PyObject * -_lzma_LZMACompressor_compress(Compressor *self, PyObject *arg) +_lzma_LZMACompressor_compress(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; Py_buffer data = {NULL, NULL}; @@ -35,7 +35,7 @@ _lzma_LZMACompressor_compress(Compressor *self, PyObject *arg) if (PyObject_GetBuffer(arg, &data, PyBUF_SIMPLE) != 0) { goto exit; } - return_value = _lzma_LZMACompressor_compress_impl(self, &data); + return_value = _lzma_LZMACompressor_compress_impl((Compressor *)self, &data); exit: /* Cleanup for data */ @@ -63,9 +63,9 @@ static PyObject * _lzma_LZMACompressor_flush_impl(Compressor *self); static PyObject * -_lzma_LZMACompressor_flush(Compressor *self, PyObject *Py_UNUSED(ignored)) +_lzma_LZMACompressor_flush(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _lzma_LZMACompressor_flush_impl(self); + return _lzma_LZMACompressor_flush_impl((Compressor *)self); } PyDoc_STRVAR(_lzma_LZMADecompressor_decompress__doc__, @@ -95,7 +95,7 @@ _lzma_LZMADecompressor_decompress_impl(Decompressor *self, Py_buffer *data, Py_ssize_t max_length); static PyObject * -_lzma_LZMADecompressor_decompress(Decompressor *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_lzma_LZMADecompressor_decompress(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -152,7 +152,7 @@ _lzma_LZMADecompressor_decompress(Decompressor *self, PyObject *const *args, Py_ max_length = ival; } skip_optional_pos: - return_value = _lzma_LZMADecompressor_decompress_impl(self, &data, max_length); + return_value = _lzma_LZMADecompressor_decompress_impl((Decompressor *)self, &data, max_length); exit: /* Cleanup for data */ @@ -329,4 +329,4 @@ _lzma__decode_filter_properties(PyObject *module, PyObject *const *args, Py_ssiz return return_value; } -/*[clinic end generated code: output=52e1b68d0886cebb input=a9049054013a1b77]*/ +/*[clinic end generated code: output=19ed9b1182f5ddf9 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_pickle.c.h b/Modules/clinic/_pickle.c.h index 2e84bb83e21ca6..91d355c5afb353 100644 --- a/Modules/clinic/_pickle.c.h +++ b/Modules/clinic/_pickle.c.h @@ -26,9 +26,9 @@ static PyObject * _pickle_Pickler_clear_memo_impl(PicklerObject *self); static PyObject * -_pickle_Pickler_clear_memo(PicklerObject *self, PyObject *Py_UNUSED(ignored)) +_pickle_Pickler_clear_memo(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _pickle_Pickler_clear_memo_impl(self); + return _pickle_Pickler_clear_memo_impl((PicklerObject *)self); } PyDoc_STRVAR(_pickle_Pickler_dump__doc__, @@ -45,7 +45,7 @@ _pickle_Pickler_dump_impl(PicklerObject *self, PyTypeObject *cls, PyObject *obj); static PyObject * -_pickle_Pickler_dump(PicklerObject *self, PyTypeObject *cls, 
PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_pickle_Pickler_dump(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -70,7 +70,7 @@ _pickle_Pickler_dump(PicklerObject *self, PyTypeObject *cls, PyObject *const *ar goto exit; } obj = args[0]; - return_value = _pickle_Pickler_dump_impl(self, cls, obj); + return_value = _pickle_Pickler_dump_impl((PicklerObject *)self, cls, obj); exit: return return_value; @@ -89,12 +89,12 @@ static size_t _pickle_Pickler___sizeof___impl(PicklerObject *self); static PyObject * -_pickle_Pickler___sizeof__(PicklerObject *self, PyObject *Py_UNUSED(ignored)) +_pickle_Pickler___sizeof__(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; size_t _return_value; - _return_value = _pickle_Pickler___sizeof___impl(self); + _return_value = _pickle_Pickler___sizeof___impl((PicklerObject *)self); if ((_return_value == (size_t)-1) && PyErr_Occurred()) { goto exit; } @@ -227,9 +227,9 @@ static PyObject * _pickle_PicklerMemoProxy_clear_impl(PicklerMemoProxyObject *self); static PyObject * -_pickle_PicklerMemoProxy_clear(PicklerMemoProxyObject *self, PyObject *Py_UNUSED(ignored)) +_pickle_PicklerMemoProxy_clear(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _pickle_PicklerMemoProxy_clear_impl(self); + return _pickle_PicklerMemoProxy_clear_impl((PicklerMemoProxyObject *)self); } PyDoc_STRVAR(_pickle_PicklerMemoProxy_copy__doc__, @@ -245,9 +245,9 @@ static PyObject * _pickle_PicklerMemoProxy_copy_impl(PicklerMemoProxyObject *self); static PyObject * -_pickle_PicklerMemoProxy_copy(PicklerMemoProxyObject *self, PyObject *Py_UNUSED(ignored)) +_pickle_PicklerMemoProxy_copy(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _pickle_PicklerMemoProxy_copy_impl(self); + return _pickle_PicklerMemoProxy_copy_impl((PicklerMemoProxyObject *)self); } PyDoc_STRVAR(_pickle_PicklerMemoProxy___reduce____doc__, @@ -263,9 +263,9 @@ static PyObject * _pickle_PicklerMemoProxy___reduce___impl(PicklerMemoProxyObject *self); static PyObject * -_pickle_PicklerMemoProxy___reduce__(PicklerMemoProxyObject *self, PyObject *Py_UNUSED(ignored)) +_pickle_PicklerMemoProxy___reduce__(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _pickle_PicklerMemoProxy___reduce___impl(self); + return _pickle_PicklerMemoProxy___reduce___impl((PicklerMemoProxyObject *)self); } PyDoc_STRVAR(_pickle_Unpickler_persistent_load__doc__, @@ -281,7 +281,7 @@ _pickle_Unpickler_persistent_load_impl(UnpicklerObject *self, PyTypeObject *cls, PyObject *pid); static PyObject * -_pickle_Unpickler_persistent_load(UnpicklerObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_pickle_Unpickler_persistent_load(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -306,7 +306,7 @@ _pickle_Unpickler_persistent_load(UnpicklerObject *self, PyTypeObject *cls, PyOb goto exit; } pid = args[0]; - return_value = _pickle_Unpickler_persistent_load_impl(self, cls, pid); + return_value = _pickle_Unpickler_persistent_load_impl((UnpicklerObject *)self, cls, pid); exit: return return_value; @@ -329,13 +329,13 @@ static PyObject * _pickle_Unpickler_load_impl(UnpicklerObject *self, PyTypeObject *cls); static PyObject * -_pickle_Unpickler_load(UnpicklerObject *self, PyTypeObject *cls, 
PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_pickle_Unpickler_load(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "load() takes no arguments"); return NULL; } - return _pickle_Unpickler_load_impl(self, cls); + return _pickle_Unpickler_load_impl((UnpicklerObject *)self, cls); } PyDoc_STRVAR(_pickle_Unpickler_find_class__doc__, @@ -360,7 +360,7 @@ _pickle_Unpickler_find_class_impl(UnpicklerObject *self, PyTypeObject *cls, PyObject *global_name); static PyObject * -_pickle_Unpickler_find_class(UnpicklerObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_pickle_Unpickler_find_class(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -387,7 +387,7 @@ _pickle_Unpickler_find_class(UnpicklerObject *self, PyTypeObject *cls, PyObject } module_name = args[0]; global_name = args[1]; - return_value = _pickle_Unpickler_find_class_impl(self, cls, module_name, global_name); + return_value = _pickle_Unpickler_find_class_impl((UnpicklerObject *)self, cls, module_name, global_name); exit: return return_value; @@ -406,12 +406,12 @@ static size_t _pickle_Unpickler___sizeof___impl(UnpicklerObject *self); static PyObject * -_pickle_Unpickler___sizeof__(UnpicklerObject *self, PyObject *Py_UNUSED(ignored)) +_pickle_Unpickler___sizeof__(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; size_t _return_value; - _return_value = _pickle_Unpickler___sizeof___impl(self); + _return_value = _pickle_Unpickler___sizeof___impl((UnpicklerObject *)self); if ((_return_value == (size_t)-1) && PyErr_Occurred()) { goto exit; } @@ -566,9 +566,9 @@ static PyObject * _pickle_UnpicklerMemoProxy_clear_impl(UnpicklerMemoProxyObject *self); static PyObject * -_pickle_UnpicklerMemoProxy_clear(UnpicklerMemoProxyObject *self, PyObject *Py_UNUSED(ignored)) +_pickle_UnpicklerMemoProxy_clear(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _pickle_UnpicklerMemoProxy_clear_impl(self); + return _pickle_UnpicklerMemoProxy_clear_impl((UnpicklerMemoProxyObject *)self); } PyDoc_STRVAR(_pickle_UnpicklerMemoProxy_copy__doc__, @@ -584,9 +584,9 @@ static PyObject * _pickle_UnpicklerMemoProxy_copy_impl(UnpicklerMemoProxyObject *self); static PyObject * -_pickle_UnpicklerMemoProxy_copy(UnpicklerMemoProxyObject *self, PyObject *Py_UNUSED(ignored)) +_pickle_UnpicklerMemoProxy_copy(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _pickle_UnpicklerMemoProxy_copy_impl(self); + return _pickle_UnpicklerMemoProxy_copy_impl((UnpicklerMemoProxyObject *)self); } PyDoc_STRVAR(_pickle_UnpicklerMemoProxy___reduce____doc__, @@ -602,9 +602,9 @@ static PyObject * _pickle_UnpicklerMemoProxy___reduce___impl(UnpicklerMemoProxyObject *self); static PyObject * -_pickle_UnpicklerMemoProxy___reduce__(UnpicklerMemoProxyObject *self, PyObject *Py_UNUSED(ignored)) +_pickle_UnpicklerMemoProxy___reduce__(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _pickle_UnpicklerMemoProxy___reduce___impl(self); + return _pickle_UnpicklerMemoProxy___reduce___impl((UnpicklerMemoProxyObject *)self); } PyDoc_STRVAR(_pickle_dump__doc__, @@ -1086,4 +1086,4 @@ _pickle_loads(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObjec exit: return return_value; } -/*[clinic end generated code: 
output=48ceb6687a8e716c input=a9049054013a1b77]*/ +/*[clinic end generated code: output=d71dc73af298ebe8 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_queuemodule.c.h b/Modules/clinic/_queuemodule.c.h index f0d4a3a164cd5f..2dfc3e6be1984e 100644 --- a/Modules/clinic/_queuemodule.c.h +++ b/Modules/clinic/_queuemodule.c.h @@ -55,7 +55,7 @@ _queue_SimpleQueue_put_impl(simplequeueobject *self, PyObject *item, int block, PyObject *timeout); static PyObject * -_queue_SimpleQueue_put(simplequeueobject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_queue_SimpleQueue_put(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -110,7 +110,7 @@ _queue_SimpleQueue_put(simplequeueobject *self, PyObject *const *args, Py_ssize_ timeout = args[2]; skip_optional_pos: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _queue_SimpleQueue_put_impl(self, item, block, timeout); + return_value = _queue_SimpleQueue_put_impl((simplequeueobject *)self, item, block, timeout); Py_END_CRITICAL_SECTION(); exit: @@ -133,7 +133,7 @@ static PyObject * _queue_SimpleQueue_put_nowait_impl(simplequeueobject *self, PyObject *item); static PyObject * -_queue_SimpleQueue_put_nowait(simplequeueobject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_queue_SimpleQueue_put_nowait(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -171,7 +171,7 @@ _queue_SimpleQueue_put_nowait(simplequeueobject *self, PyObject *const *args, Py } item = args[0]; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _queue_SimpleQueue_put_nowait_impl(self, item); + return_value = _queue_SimpleQueue_put_nowait_impl((simplequeueobject *)self, item); Py_END_CRITICAL_SECTION(); exit: @@ -200,7 +200,7 @@ _queue_SimpleQueue_get_impl(simplequeueobject *self, PyTypeObject *cls, int block, PyObject *timeout_obj); static PyObject * -_queue_SimpleQueue_get(simplequeueobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_queue_SimpleQueue_get(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -253,7 +253,7 @@ _queue_SimpleQueue_get(simplequeueobject *self, PyTypeObject *cls, PyObject *con timeout_obj = args[1]; skip_optional_pos: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _queue_SimpleQueue_get_impl(self, cls, block, timeout_obj); + return_value = _queue_SimpleQueue_get_impl((simplequeueobject *)self, cls, block, timeout_obj); Py_END_CRITICAL_SECTION(); exit: @@ -277,7 +277,7 @@ _queue_SimpleQueue_get_nowait_impl(simplequeueobject *self, PyTypeObject *cls); static PyObject * -_queue_SimpleQueue_get_nowait(simplequeueobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_queue_SimpleQueue_get_nowait(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; @@ -286,7 +286,7 @@ _queue_SimpleQueue_get_nowait(simplequeueobject *self, PyTypeObject *cls, PyObje goto exit; } Py_BEGIN_CRITICAL_SECTION(self); - return_value = _queue_SimpleQueue_get_nowait_impl(self, cls); + return_value = _queue_SimpleQueue_get_nowait_impl((simplequeueobject *)self, cls); Py_END_CRITICAL_SECTION(); exit: @@ -306,13 
+306,13 @@ static int _queue_SimpleQueue_empty_impl(simplequeueobject *self); static PyObject * -_queue_SimpleQueue_empty(simplequeueobject *self, PyObject *Py_UNUSED(ignored)) +_queue_SimpleQueue_empty(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; int _return_value; Py_BEGIN_CRITICAL_SECTION(self); - _return_value = _queue_SimpleQueue_empty_impl(self); + _return_value = _queue_SimpleQueue_empty_impl((simplequeueobject *)self); Py_END_CRITICAL_SECTION(); if ((_return_value == -1) && PyErr_Occurred()) { goto exit; @@ -336,13 +336,13 @@ static Py_ssize_t _queue_SimpleQueue_qsize_impl(simplequeueobject *self); static PyObject * -_queue_SimpleQueue_qsize(simplequeueobject *self, PyObject *Py_UNUSED(ignored)) +_queue_SimpleQueue_qsize(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_ssize_t _return_value; Py_BEGIN_CRITICAL_SECTION(self); - _return_value = _queue_SimpleQueue_qsize_impl(self); + _return_value = _queue_SimpleQueue_qsize_impl((simplequeueobject *)self); Py_END_CRITICAL_SECTION(); if ((_return_value == -1) && PyErr_Occurred()) { goto exit; @@ -352,4 +352,4 @@ _queue_SimpleQueue_qsize(simplequeueobject *self, PyObject *Py_UNUSED(ignored)) exit: return return_value; } -/*[clinic end generated code: output=07b5742dca7692d9 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=e04e15a1b959c700 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_randommodule.c.h b/Modules/clinic/_randommodule.c.h index 6193acac67e7ac..b2d67e11c63595 100644 --- a/Modules/clinic/_randommodule.c.h +++ b/Modules/clinic/_randommodule.c.h @@ -18,12 +18,12 @@ static PyObject * _random_Random_random_impl(RandomObject *self); static PyObject * -_random_Random_random(RandomObject *self, PyObject *Py_UNUSED(ignored)) +_random_Random_random(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _random_Random_random_impl(self); + return_value = _random_Random_random_impl((RandomObject *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -45,7 +45,7 @@ static PyObject * _random_Random_seed_impl(RandomObject *self, PyObject *n); static PyObject * -_random_Random_seed(RandomObject *self, PyObject *const *args, Py_ssize_t nargs) +_random_Random_seed(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *n = Py_None; @@ -59,7 +59,7 @@ _random_Random_seed(RandomObject *self, PyObject *const *args, Py_ssize_t nargs) n = args[0]; skip_optional: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _random_Random_seed_impl(self, n); + return_value = _random_Random_seed_impl((RandomObject *)self, n); Py_END_CRITICAL_SECTION(); exit: @@ -79,12 +79,12 @@ static PyObject * _random_Random_getstate_impl(RandomObject *self); static PyObject * -_random_Random_getstate(RandomObject *self, PyObject *Py_UNUSED(ignored)) +_random_Random_getstate(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _random_Random_getstate_impl(self); + return_value = _random_Random_getstate_impl((RandomObject *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -108,7 +108,7 @@ _random_Random_setstate(RandomObject *self, PyObject *state) PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _random_Random_setstate_impl(self, state); + return_value = _random_Random_setstate_impl((RandomObject *)self, state); Py_END_CRITICAL_SECTION(); return 
return_value; @@ -127,7 +127,7 @@ static PyObject * _random_Random_getrandbits_impl(RandomObject *self, int k); static PyObject * -_random_Random_getrandbits(RandomObject *self, PyObject *arg) +_random_Random_getrandbits(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; int k; @@ -137,10 +137,10 @@ _random_Random_getrandbits(RandomObject *self, PyObject *arg) goto exit; } Py_BEGIN_CRITICAL_SECTION(self); - return_value = _random_Random_getrandbits_impl(self, k); + return_value = _random_Random_getrandbits_impl((RandomObject *)self, k); Py_END_CRITICAL_SECTION(); exit: return return_value; } -/*[clinic end generated code: output=bf49ece1d341b1b6 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=859cfbf59c133a4e input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_ssl.c.h b/Modules/clinic/_ssl.c.h index becdb9cc1831fa..73c5d304f1a141 100644 --- a/Modules/clinic/_ssl.c.h +++ b/Modules/clinic/_ssl.c.h @@ -21,12 +21,12 @@ static PyObject * _ssl__SSLSocket_do_handshake_impl(PySSLSocket *self); static PyObject * -_ssl__SSLSocket_do_handshake(PySSLSocket *self, PyObject *Py_UNUSED(ignored)) +_ssl__SSLSocket_do_handshake(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLSocket_do_handshake_impl(self); + return_value = _ssl__SSLSocket_do_handshake_impl((PySSLSocket *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -79,7 +79,7 @@ static PyObject * _ssl__SSLSocket_getpeercert_impl(PySSLSocket *self, int binary_mode); static PyObject * -_ssl__SSLSocket_getpeercert(PySSLSocket *self, PyObject *const *args, Py_ssize_t nargs) +_ssl__SSLSocket_getpeercert(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; int binary_mode = 0; @@ -96,7 +96,7 @@ _ssl__SSLSocket_getpeercert(PySSLSocket *self, PyObject *const *args, Py_ssize_t } skip_optional: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLSocket_getpeercert_impl(self, binary_mode); + return_value = _ssl__SSLSocket_getpeercert_impl((PySSLSocket *)self, binary_mode); Py_END_CRITICAL_SECTION(); exit: @@ -115,12 +115,12 @@ static PyObject * _ssl__SSLSocket_get_verified_chain_impl(PySSLSocket *self); static PyObject * -_ssl__SSLSocket_get_verified_chain(PySSLSocket *self, PyObject *Py_UNUSED(ignored)) +_ssl__SSLSocket_get_verified_chain(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLSocket_get_verified_chain_impl(self); + return_value = _ssl__SSLSocket_get_verified_chain_impl((PySSLSocket *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -138,12 +138,12 @@ static PyObject * _ssl__SSLSocket_get_unverified_chain_impl(PySSLSocket *self); static PyObject * -_ssl__SSLSocket_get_unverified_chain(PySSLSocket *self, PyObject *Py_UNUSED(ignored)) +_ssl__SSLSocket_get_unverified_chain(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLSocket_get_unverified_chain_impl(self); + return_value = _ssl__SSLSocket_get_unverified_chain_impl((PySSLSocket *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -161,12 +161,12 @@ static PyObject * _ssl__SSLSocket_shared_ciphers_impl(PySSLSocket *self); static PyObject * -_ssl__SSLSocket_shared_ciphers(PySSLSocket *self, PyObject *Py_UNUSED(ignored)) +_ssl__SSLSocket_shared_ciphers(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; 
Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLSocket_shared_ciphers_impl(self); + return_value = _ssl__SSLSocket_shared_ciphers_impl((PySSLSocket *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -184,12 +184,12 @@ static PyObject * _ssl__SSLSocket_cipher_impl(PySSLSocket *self); static PyObject * -_ssl__SSLSocket_cipher(PySSLSocket *self, PyObject *Py_UNUSED(ignored)) +_ssl__SSLSocket_cipher(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLSocket_cipher_impl(self); + return_value = _ssl__SSLSocket_cipher_impl((PySSLSocket *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -207,12 +207,12 @@ static PyObject * _ssl__SSLSocket_version_impl(PySSLSocket *self); static PyObject * -_ssl__SSLSocket_version(PySSLSocket *self, PyObject *Py_UNUSED(ignored)) +_ssl__SSLSocket_version(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLSocket_version_impl(self); + return_value = _ssl__SSLSocket_version_impl((PySSLSocket *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -230,12 +230,12 @@ static PyObject * _ssl__SSLSocket_selected_alpn_protocol_impl(PySSLSocket *self); static PyObject * -_ssl__SSLSocket_selected_alpn_protocol(PySSLSocket *self, PyObject *Py_UNUSED(ignored)) +_ssl__SSLSocket_selected_alpn_protocol(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLSocket_selected_alpn_protocol_impl(self); + return_value = _ssl__SSLSocket_selected_alpn_protocol_impl((PySSLSocket *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -253,9 +253,9 @@ static PyObject * _ssl__SSLSocket_compression_impl(PySSLSocket *self); static PyObject * -_ssl__SSLSocket_compression(PySSLSocket *self, PyObject *Py_UNUSED(ignored)) +_ssl__SSLSocket_compression(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _ssl__SSLSocket_compression_impl(self); + return _ssl__SSLSocket_compression_impl((PySSLSocket *)self); } PyDoc_STRVAR(_ssl__SSLSocket_context__doc__, @@ -283,12 +283,12 @@ static PyObject * _ssl__SSLSocket_context_get_impl(PySSLSocket *self); static PyObject * -_ssl__SSLSocket_context_get(PySSLSocket *self, void *Py_UNUSED(context)) +_ssl__SSLSocket_context_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLSocket_context_get_impl(self); + return_value = _ssl__SSLSocket_context_get_impl((PySSLSocket *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -308,12 +308,12 @@ static int _ssl__SSLSocket_context_set_impl(PySSLSocket *self, PyObject *value); static int -_ssl__SSLSocket_context_set(PySSLSocket *self, PyObject *value, void *Py_UNUSED(context)) +_ssl__SSLSocket_context_set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) { int return_value; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLSocket_context_set_impl(self, value); + return_value = _ssl__SSLSocket_context_set_impl((PySSLSocket *)self, value); Py_END_CRITICAL_SECTION(); return return_value; @@ -340,12 +340,12 @@ static PyObject * _ssl__SSLSocket_server_side_get_impl(PySSLSocket *self); static PyObject * -_ssl__SSLSocket_server_side_get(PySSLSocket *self, void *Py_UNUSED(context)) +_ssl__SSLSocket_server_side_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - 
return_value = _ssl__SSLSocket_server_side_get_impl(self); + return_value = _ssl__SSLSocket_server_side_get_impl((PySSLSocket *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -372,12 +372,12 @@ static PyObject * _ssl__SSLSocket_server_hostname_get_impl(PySSLSocket *self); static PyObject * -_ssl__SSLSocket_server_hostname_get(PySSLSocket *self, void *Py_UNUSED(context)) +_ssl__SSLSocket_server_hostname_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLSocket_server_hostname_get_impl(self); + return_value = _ssl__SSLSocket_server_hostname_get_impl((PySSLSocket *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -406,12 +406,12 @@ static PyObject * _ssl__SSLSocket_owner_get_impl(PySSLSocket *self); static PyObject * -_ssl__SSLSocket_owner_get(PySSLSocket *self, void *Py_UNUSED(context)) +_ssl__SSLSocket_owner_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLSocket_owner_get_impl(self); + return_value = _ssl__SSLSocket_owner_get_impl((PySSLSocket *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -431,12 +431,12 @@ static int _ssl__SSLSocket_owner_set_impl(PySSLSocket *self, PyObject *value); static int -_ssl__SSLSocket_owner_set(PySSLSocket *self, PyObject *value, void *Py_UNUSED(context)) +_ssl__SSLSocket_owner_set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) { int return_value; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLSocket_owner_set_impl(self, value); + return_value = _ssl__SSLSocket_owner_set_impl((PySSLSocket *)self, value); Py_END_CRITICAL_SECTION(); return return_value; @@ -457,7 +457,7 @@ static PyObject * _ssl__SSLSocket_write_impl(PySSLSocket *self, Py_buffer *b); static PyObject * -_ssl__SSLSocket_write(PySSLSocket *self, PyObject *arg) +_ssl__SSLSocket_write(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; Py_buffer b = {NULL, NULL}; @@ -466,7 +466,7 @@ _ssl__SSLSocket_write(PySSLSocket *self, PyObject *arg) goto exit; } Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLSocket_write_impl(self, &b); + return_value = _ssl__SSLSocket_write_impl((PySSLSocket *)self, &b); Py_END_CRITICAL_SECTION(); exit: @@ -491,12 +491,12 @@ static PyObject * _ssl__SSLSocket_pending_impl(PySSLSocket *self); static PyObject * -_ssl__SSLSocket_pending(PySSLSocket *self, PyObject *Py_UNUSED(ignored)) +_ssl__SSLSocket_pending(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLSocket_pending_impl(self); + return_value = _ssl__SSLSocket_pending_impl((PySSLSocket *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -514,7 +514,7 @@ _ssl__SSLSocket_read_impl(PySSLSocket *self, Py_ssize_t len, int group_right_1, Py_buffer *buffer); static PyObject * -_ssl__SSLSocket_read(PySSLSocket *self, PyObject *args) +_ssl__SSLSocket_read(PyObject *self, PyObject *args) { PyObject *return_value = NULL; Py_ssize_t len; @@ -538,7 +538,7 @@ _ssl__SSLSocket_read(PySSLSocket *self, PyObject *args) goto exit; } Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLSocket_read_impl(self, len, group_right_1, &buffer); + return_value = _ssl__SSLSocket_read_impl((PySSLSocket *)self, len, group_right_1, &buffer); Py_END_CRITICAL_SECTION(); exit: @@ -563,12 +563,12 @@ static PyObject * _ssl__SSLSocket_shutdown_impl(PySSLSocket *self); static PyObject * 
-_ssl__SSLSocket_shutdown(PySSLSocket *self, PyObject *Py_UNUSED(ignored)) +_ssl__SSLSocket_shutdown(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLSocket_shutdown_impl(self); + return_value = _ssl__SSLSocket_shutdown_impl((PySSLSocket *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -592,7 +592,7 @@ _ssl__SSLSocket_get_channel_binding_impl(PySSLSocket *self, const char *cb_type); static PyObject * -_ssl__SSLSocket_get_channel_binding(PySSLSocket *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_ssl__SSLSocket_get_channel_binding(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -647,7 +647,7 @@ _ssl__SSLSocket_get_channel_binding(PySSLSocket *self, PyObject *const *args, Py } skip_optional_pos: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLSocket_get_channel_binding_impl(self, cb_type); + return_value = _ssl__SSLSocket_get_channel_binding_impl((PySSLSocket *)self, cb_type); Py_END_CRITICAL_SECTION(); exit: @@ -667,12 +667,12 @@ static PyObject * _ssl__SSLSocket_verify_client_post_handshake_impl(PySSLSocket *self); static PyObject * -_ssl__SSLSocket_verify_client_post_handshake(PySSLSocket *self, PyObject *Py_UNUSED(ignored)) +_ssl__SSLSocket_verify_client_post_handshake(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLSocket_verify_client_post_handshake_impl(self); + return_value = _ssl__SSLSocket_verify_client_post_handshake_impl((PySSLSocket *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -699,12 +699,12 @@ static PyObject * _ssl__SSLSocket_session_get_impl(PySSLSocket *self); static PyObject * -_ssl__SSLSocket_session_get(PySSLSocket *self, void *Py_UNUSED(context)) +_ssl__SSLSocket_session_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLSocket_session_get_impl(self); + return_value = _ssl__SSLSocket_session_get_impl((PySSLSocket *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -724,12 +724,12 @@ static int _ssl__SSLSocket_session_set_impl(PySSLSocket *self, PyObject *value); static int -_ssl__SSLSocket_session_set(PySSLSocket *self, PyObject *value, void *Py_UNUSED(context)) +_ssl__SSLSocket_session_set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) { int return_value; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLSocket_session_set_impl(self, value); + return_value = _ssl__SSLSocket_session_set_impl((PySSLSocket *)self, value); Py_END_CRITICAL_SECTION(); return return_value; @@ -756,12 +756,12 @@ static PyObject * _ssl__SSLSocket_session_reused_get_impl(PySSLSocket *self); static PyObject * -_ssl__SSLSocket_session_reused_get(PySSLSocket *self, void *Py_UNUSED(context)) +_ssl__SSLSocket_session_reused_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLSocket_session_reused_get_impl(self); + return_value = _ssl__SSLSocket_session_reused_get_impl((PySSLSocket *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -808,7 +808,7 @@ static PyObject * _ssl__SSLContext_set_ciphers_impl(PySSLContext *self, const char *cipherlist); static PyObject * -_ssl__SSLContext_set_ciphers(PySSLContext *self, PyObject *arg) 
+_ssl__SSLContext_set_ciphers(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; const char *cipherlist; @@ -827,7 +827,7 @@ _ssl__SSLContext_set_ciphers(PySSLContext *self, PyObject *arg) goto exit; } Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_set_ciphers_impl(self, cipherlist); + return_value = _ssl__SSLContext_set_ciphers_impl((PySSLContext *)self, cipherlist); Py_END_CRITICAL_SECTION(); exit: @@ -846,12 +846,12 @@ static PyObject * _ssl__SSLContext_get_ciphers_impl(PySSLContext *self); static PyObject * -_ssl__SSLContext_get_ciphers(PySSLContext *self, PyObject *Py_UNUSED(ignored)) +_ssl__SSLContext_get_ciphers(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_get_ciphers_impl(self); + return_value = _ssl__SSLContext_get_ciphers_impl((PySSLContext *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -870,7 +870,7 @@ _ssl__SSLContext__set_alpn_protocols_impl(PySSLContext *self, Py_buffer *protos); static PyObject * -_ssl__SSLContext__set_alpn_protocols(PySSLContext *self, PyObject *arg) +_ssl__SSLContext__set_alpn_protocols(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; Py_buffer protos = {NULL, NULL}; @@ -879,7 +879,7 @@ _ssl__SSLContext__set_alpn_protocols(PySSLContext *self, PyObject *arg) goto exit; } Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext__set_alpn_protocols_impl(self, &protos); + return_value = _ssl__SSLContext__set_alpn_protocols_impl((PySSLContext *)self, &protos); Py_END_CRITICAL_SECTION(); exit: @@ -905,12 +905,12 @@ static PyObject * _ssl__SSLContext_verify_mode_get_impl(PySSLContext *self); static PyObject * -_ssl__SSLContext_verify_mode_get(PySSLContext *self, void *Py_UNUSED(context)) +_ssl__SSLContext_verify_mode_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_verify_mode_get_impl(self); + return_value = _ssl__SSLContext_verify_mode_get_impl((PySSLContext *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -930,12 +930,12 @@ static int _ssl__SSLContext_verify_mode_set_impl(PySSLContext *self, PyObject *value); static int -_ssl__SSLContext_verify_mode_set(PySSLContext *self, PyObject *value, void *Py_UNUSED(context)) +_ssl__SSLContext_verify_mode_set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) { int return_value; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_verify_mode_set_impl(self, value); + return_value = _ssl__SSLContext_verify_mode_set_impl((PySSLContext *)self, value); Py_END_CRITICAL_SECTION(); return return_value; @@ -955,12 +955,12 @@ static PyObject * _ssl__SSLContext_verify_flags_get_impl(PySSLContext *self); static PyObject * -_ssl__SSLContext_verify_flags_get(PySSLContext *self, void *Py_UNUSED(context)) +_ssl__SSLContext_verify_flags_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_verify_flags_get_impl(self); + return_value = _ssl__SSLContext_verify_flags_get_impl((PySSLContext *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -980,12 +980,12 @@ static int _ssl__SSLContext_verify_flags_set_impl(PySSLContext *self, PyObject *value); static int -_ssl__SSLContext_verify_flags_set(PySSLContext *self, PyObject *value, void *Py_UNUSED(context)) +_ssl__SSLContext_verify_flags_set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) 
{ int return_value; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_verify_flags_set_impl(self, value); + return_value = _ssl__SSLContext_verify_flags_set_impl((PySSLContext *)self, value); Py_END_CRITICAL_SECTION(); return return_value; @@ -1005,12 +1005,12 @@ static PyObject * _ssl__SSLContext_minimum_version_get_impl(PySSLContext *self); static PyObject * -_ssl__SSLContext_minimum_version_get(PySSLContext *self, void *Py_UNUSED(context)) +_ssl__SSLContext_minimum_version_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_minimum_version_get_impl(self); + return_value = _ssl__SSLContext_minimum_version_get_impl((PySSLContext *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1031,12 +1031,12 @@ _ssl__SSLContext_minimum_version_set_impl(PySSLContext *self, PyObject *value); static int -_ssl__SSLContext_minimum_version_set(PySSLContext *self, PyObject *value, void *Py_UNUSED(context)) +_ssl__SSLContext_minimum_version_set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) { int return_value; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_minimum_version_set_impl(self, value); + return_value = _ssl__SSLContext_minimum_version_set_impl((PySSLContext *)self, value); Py_END_CRITICAL_SECTION(); return return_value; @@ -1056,12 +1056,12 @@ static PyObject * _ssl__SSLContext_maximum_version_get_impl(PySSLContext *self); static PyObject * -_ssl__SSLContext_maximum_version_get(PySSLContext *self, void *Py_UNUSED(context)) +_ssl__SSLContext_maximum_version_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_maximum_version_get_impl(self); + return_value = _ssl__SSLContext_maximum_version_get_impl((PySSLContext *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1082,12 +1082,12 @@ _ssl__SSLContext_maximum_version_set_impl(PySSLContext *self, PyObject *value); static int -_ssl__SSLContext_maximum_version_set(PySSLContext *self, PyObject *value, void *Py_UNUSED(context)) +_ssl__SSLContext_maximum_version_set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) { int return_value; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_maximum_version_set_impl(self, value); + return_value = _ssl__SSLContext_maximum_version_set_impl((PySSLContext *)self, value); Py_END_CRITICAL_SECTION(); return return_value; @@ -1114,12 +1114,12 @@ static PyObject * _ssl__SSLContext_num_tickets_get_impl(PySSLContext *self); static PyObject * -_ssl__SSLContext_num_tickets_get(PySSLContext *self, void *Py_UNUSED(context)) +_ssl__SSLContext_num_tickets_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_num_tickets_get_impl(self); + return_value = _ssl__SSLContext_num_tickets_get_impl((PySSLContext *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1139,12 +1139,12 @@ static int _ssl__SSLContext_num_tickets_set_impl(PySSLContext *self, PyObject *value); static int -_ssl__SSLContext_num_tickets_set(PySSLContext *self, PyObject *value, void *Py_UNUSED(context)) +_ssl__SSLContext_num_tickets_set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) { int return_value; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_num_tickets_set_impl(self, value); + return_value = _ssl__SSLContext_num_tickets_set_impl((PySSLContext *)self, value); 
Py_END_CRITICAL_SECTION(); return return_value; @@ -1171,12 +1171,12 @@ static PyObject * _ssl__SSLContext_security_level_get_impl(PySSLContext *self); static PyObject * -_ssl__SSLContext_security_level_get(PySSLContext *self, void *Py_UNUSED(context)) +_ssl__SSLContext_security_level_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_security_level_get_impl(self); + return_value = _ssl__SSLContext_security_level_get_impl((PySSLContext *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1196,12 +1196,12 @@ static PyObject * _ssl__SSLContext_options_get_impl(PySSLContext *self); static PyObject * -_ssl__SSLContext_options_get(PySSLContext *self, void *Py_UNUSED(context)) +_ssl__SSLContext_options_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_options_get_impl(self); + return_value = _ssl__SSLContext_options_get_impl((PySSLContext *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1221,12 +1221,12 @@ static int _ssl__SSLContext_options_set_impl(PySSLContext *self, PyObject *value); static int -_ssl__SSLContext_options_set(PySSLContext *self, PyObject *value, void *Py_UNUSED(context)) +_ssl__SSLContext_options_set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) { int return_value; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_options_set_impl(self, value); + return_value = _ssl__SSLContext_options_set_impl((PySSLContext *)self, value); Py_END_CRITICAL_SECTION(); return return_value; @@ -1246,12 +1246,12 @@ static PyObject * _ssl__SSLContext__host_flags_get_impl(PySSLContext *self); static PyObject * -_ssl__SSLContext__host_flags_get(PySSLContext *self, void *Py_UNUSED(context)) +_ssl__SSLContext__host_flags_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext__host_flags_get_impl(self); + return_value = _ssl__SSLContext__host_flags_get_impl((PySSLContext *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1271,12 +1271,12 @@ static int _ssl__SSLContext__host_flags_set_impl(PySSLContext *self, PyObject *value); static int -_ssl__SSLContext__host_flags_set(PySSLContext *self, PyObject *value, void *Py_UNUSED(context)) +_ssl__SSLContext__host_flags_set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) { int return_value; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext__host_flags_set_impl(self, value); + return_value = _ssl__SSLContext__host_flags_set_impl((PySSLContext *)self, value); Py_END_CRITICAL_SECTION(); return return_value; @@ -1296,12 +1296,12 @@ static PyObject * _ssl__SSLContext_check_hostname_get_impl(PySSLContext *self); static PyObject * -_ssl__SSLContext_check_hostname_get(PySSLContext *self, void *Py_UNUSED(context)) +_ssl__SSLContext_check_hostname_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_check_hostname_get_impl(self); + return_value = _ssl__SSLContext_check_hostname_get_impl((PySSLContext *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1321,12 +1321,12 @@ static int _ssl__SSLContext_check_hostname_set_impl(PySSLContext *self, PyObject *value); static int -_ssl__SSLContext_check_hostname_set(PySSLContext *self, PyObject *value, void *Py_UNUSED(context)) 
+_ssl__SSLContext_check_hostname_set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) { int return_value; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_check_hostname_set_impl(self, value); + return_value = _ssl__SSLContext_check_hostname_set_impl((PySSLContext *)self, value); Py_END_CRITICAL_SECTION(); return return_value; @@ -1346,12 +1346,12 @@ static PyObject * _ssl__SSLContext_protocol_get_impl(PySSLContext *self); static PyObject * -_ssl__SSLContext_protocol_get(PySSLContext *self, void *Py_UNUSED(context)) +_ssl__SSLContext_protocol_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_protocol_get_impl(self); + return_value = _ssl__SSLContext_protocol_get_impl((PySSLContext *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1370,7 +1370,7 @@ _ssl__SSLContext_load_cert_chain_impl(PySSLContext *self, PyObject *certfile, PyObject *keyfile, PyObject *password); static PyObject * -_ssl__SSLContext_load_cert_chain(PySSLContext *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_ssl__SSLContext_load_cert_chain(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -1422,7 +1422,7 @@ _ssl__SSLContext_load_cert_chain(PySSLContext *self, PyObject *const *args, Py_s password = args[2]; skip_optional_pos: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_load_cert_chain_impl(self, certfile, keyfile, password); + return_value = _ssl__SSLContext_load_cert_chain_impl((PySSLContext *)self, certfile, keyfile, password); Py_END_CRITICAL_SECTION(); exit: @@ -1444,7 +1444,7 @@ _ssl__SSLContext_load_verify_locations_impl(PySSLContext *self, PyObject *cadata); static PyObject * -_ssl__SSLContext_load_verify_locations(PySSLContext *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_ssl__SSLContext_load_verify_locations(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -1501,7 +1501,7 @@ _ssl__SSLContext_load_verify_locations(PySSLContext *self, PyObject *const *args cadata = args[2]; skip_optional_pos: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_load_verify_locations_impl(self, cafile, capath, cadata); + return_value = _ssl__SSLContext_load_verify_locations_impl((PySSLContext *)self, cafile, capath, cadata); Py_END_CRITICAL_SECTION(); exit: @@ -1525,7 +1525,7 @@ _ssl__SSLContext_load_dh_params(PySSLContext *self, PyObject *filepath) PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_load_dh_params_impl(self, filepath); + return_value = _ssl__SSLContext_load_dh_params_impl((PySSLContext *)self, filepath); Py_END_CRITICAL_SECTION(); return return_value; @@ -1546,7 +1546,7 @@ _ssl__SSLContext__wrap_socket_impl(PySSLContext *self, PyObject *sock, PyObject *owner, PyObject *session); static PyObject * -_ssl__SSLContext__wrap_socket(PySSLContext *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_ssl__SSLContext__wrap_socket(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -1618,7 +1618,7 @@ _ssl__SSLContext__wrap_socket(PySSLContext *self, PyObject *const *args, Py_ssiz session = 
args[4]; skip_optional_kwonly: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext__wrap_socket_impl(self, sock, server_side, hostname_obj, owner, session); + return_value = _ssl__SSLContext__wrap_socket_impl((PySSLContext *)self, sock, server_side, hostname_obj, owner, session); Py_END_CRITICAL_SECTION(); exit: @@ -1641,7 +1641,7 @@ _ssl__SSLContext__wrap_bio_impl(PySSLContext *self, PySSLMemoryBIO *incoming, PyObject *session); static PyObject * -_ssl__SSLContext__wrap_bio(PySSLContext *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_ssl__SSLContext__wrap_bio(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -1719,7 +1719,7 @@ _ssl__SSLContext__wrap_bio(PySSLContext *self, PyObject *const *args, Py_ssize_t session = args[5]; skip_optional_kwonly: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext__wrap_bio_impl(self, incoming, outgoing, server_side, hostname_obj, owner, session); + return_value = _ssl__SSLContext__wrap_bio_impl((PySSLContext *)self, incoming, outgoing, server_side, hostname_obj, owner, session); Py_END_CRITICAL_SECTION(); exit: @@ -1738,12 +1738,12 @@ static PyObject * _ssl__SSLContext_session_stats_impl(PySSLContext *self); static PyObject * -_ssl__SSLContext_session_stats(PySSLContext *self, PyObject *Py_UNUSED(ignored)) +_ssl__SSLContext_session_stats(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_session_stats_impl(self); + return_value = _ssl__SSLContext_session_stats_impl((PySSLContext *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1761,12 +1761,12 @@ static PyObject * _ssl__SSLContext_set_default_verify_paths_impl(PySSLContext *self); static PyObject * -_ssl__SSLContext_set_default_verify_paths(PySSLContext *self, PyObject *Py_UNUSED(ignored)) +_ssl__SSLContext_set_default_verify_paths(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_set_default_verify_paths_impl(self); + return_value = _ssl__SSLContext_set_default_verify_paths_impl((PySSLContext *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1789,7 +1789,7 @@ _ssl__SSLContext_set_ecdh_curve(PySSLContext *self, PyObject *name) PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_set_ecdh_curve_impl(self, name); + return_value = _ssl__SSLContext_set_ecdh_curve_impl((PySSLContext *)self, name); Py_END_CRITICAL_SECTION(); return return_value; @@ -1821,12 +1821,12 @@ static PyObject * _ssl__SSLContext_sni_callback_get_impl(PySSLContext *self); static PyObject * -_ssl__SSLContext_sni_callback_get(PySSLContext *self, void *Py_UNUSED(context)) +_ssl__SSLContext_sni_callback_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_sni_callback_get_impl(self); + return_value = _ssl__SSLContext_sni_callback_get_impl((PySSLContext *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1846,12 +1846,12 @@ static int _ssl__SSLContext_sni_callback_set_impl(PySSLContext *self, PyObject *value); static int -_ssl__SSLContext_sni_callback_set(PySSLContext *self, PyObject *value, void *Py_UNUSED(context)) +_ssl__SSLContext_sni_callback_set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) { int 
return_value; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_sni_callback_set_impl(self, value); + return_value = _ssl__SSLContext_sni_callback_set_impl((PySSLContext *)self, value); Py_END_CRITICAL_SECTION(); return return_value; @@ -1876,12 +1876,12 @@ static PyObject * _ssl__SSLContext_cert_store_stats_impl(PySSLContext *self); static PyObject * -_ssl__SSLContext_cert_store_stats(PySSLContext *self, PyObject *Py_UNUSED(ignored)) +_ssl__SSLContext_cert_store_stats(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_cert_store_stats_impl(self); + return_value = _ssl__SSLContext_cert_store_stats_impl((PySSLContext *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1906,7 +1906,7 @@ static PyObject * _ssl__SSLContext_get_ca_certs_impl(PySSLContext *self, int binary_form); static PyObject * -_ssl__SSLContext_get_ca_certs(PySSLContext *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_ssl__SSLContext_get_ca_certs(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -1952,7 +1952,7 @@ _ssl__SSLContext_get_ca_certs(PySSLContext *self, PyObject *const *args, Py_ssiz } skip_optional_pos: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_get_ca_certs_impl(self, binary_form); + return_value = _ssl__SSLContext_get_ca_certs_impl((PySSLContext *)self, binary_form); Py_END_CRITICAL_SECTION(); exit: @@ -1972,7 +1972,7 @@ _ssl__SSLContext_set_psk_client_callback_impl(PySSLContext *self, PyObject *callback); static PyObject * -_ssl__SSLContext_set_psk_client_callback(PySSLContext *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_ssl__SSLContext_set_psk_client_callback(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -2010,7 +2010,7 @@ _ssl__SSLContext_set_psk_client_callback(PySSLContext *self, PyObject *const *ar } callback = args[0]; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_set_psk_client_callback_impl(self, callback); + return_value = _ssl__SSLContext_set_psk_client_callback_impl((PySSLContext *)self, callback); Py_END_CRITICAL_SECTION(); exit: @@ -2031,7 +2031,7 @@ _ssl__SSLContext_set_psk_server_callback_impl(PySSLContext *self, const char *identity_hint); static PyObject * -_ssl__SSLContext_set_psk_server_callback(PySSLContext *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_ssl__SSLContext_set_psk_server_callback(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -2093,7 +2093,7 @@ _ssl__SSLContext_set_psk_server_callback(PySSLContext *self, PyObject *const *ar } skip_optional_pos: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_set_psk_server_callback_impl(self, callback, identity_hint); + return_value = _ssl__SSLContext_set_psk_server_callback_impl((PySSLContext *)self, callback, identity_hint); Py_END_CRITICAL_SECTION(); exit: @@ -2146,12 +2146,12 @@ static PyObject * _ssl_MemoryBIO_pending_get_impl(PySSLMemoryBIO *self); static PyObject * -_ssl_MemoryBIO_pending_get(PySSLMemoryBIO *self, void *Py_UNUSED(context)) +_ssl_MemoryBIO_pending_get(PyObject *self, void *Py_UNUSED(context)) { PyObject 
*return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl_MemoryBIO_pending_get_impl(self); + return_value = _ssl_MemoryBIO_pending_get_impl((PySSLMemoryBIO *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -2178,12 +2178,12 @@ static PyObject * _ssl_MemoryBIO_eof_get_impl(PySSLMemoryBIO *self); static PyObject * -_ssl_MemoryBIO_eof_get(PySSLMemoryBIO *self, void *Py_UNUSED(context)) +_ssl_MemoryBIO_eof_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl_MemoryBIO_eof_get_impl(self); + return_value = _ssl_MemoryBIO_eof_get_impl((PySSLMemoryBIO *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -2207,7 +2207,7 @@ static PyObject * _ssl_MemoryBIO_read_impl(PySSLMemoryBIO *self, int len); static PyObject * -_ssl_MemoryBIO_read(PySSLMemoryBIO *self, PyObject *const *args, Py_ssize_t nargs) +_ssl_MemoryBIO_read(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; int len = -1; @@ -2224,7 +2224,7 @@ _ssl_MemoryBIO_read(PySSLMemoryBIO *self, PyObject *const *args, Py_ssize_t narg } skip_optional: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl_MemoryBIO_read_impl(self, len); + return_value = _ssl_MemoryBIO_read_impl((PySSLMemoryBIO *)self, len); Py_END_CRITICAL_SECTION(); exit: @@ -2246,7 +2246,7 @@ static PyObject * _ssl_MemoryBIO_write_impl(PySSLMemoryBIO *self, Py_buffer *b); static PyObject * -_ssl_MemoryBIO_write(PySSLMemoryBIO *self, PyObject *arg) +_ssl_MemoryBIO_write(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; Py_buffer b = {NULL, NULL}; @@ -2255,7 +2255,7 @@ _ssl_MemoryBIO_write(PySSLMemoryBIO *self, PyObject *arg) goto exit; } Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl_MemoryBIO_write_impl(self, &b); + return_value = _ssl_MemoryBIO_write_impl((PySSLMemoryBIO *)self, &b); Py_END_CRITICAL_SECTION(); exit: @@ -2282,12 +2282,12 @@ static PyObject * _ssl_MemoryBIO_write_eof_impl(PySSLMemoryBIO *self); static PyObject * -_ssl_MemoryBIO_write_eof(PySSLMemoryBIO *self, PyObject *Py_UNUSED(ignored)) +_ssl_MemoryBIO_write_eof(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl_MemoryBIO_write_eof_impl(self); + return_value = _ssl_MemoryBIO_write_eof_impl((PySSLMemoryBIO *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -2314,12 +2314,12 @@ static PyObject * _ssl_SSLSession_time_get_impl(PySSLSession *self); static PyObject * -_ssl_SSLSession_time_get(PySSLSession *self, void *Py_UNUSED(context)) +_ssl_SSLSession_time_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl_SSLSession_time_get_impl(self); + return_value = _ssl_SSLSession_time_get_impl((PySSLSession *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -2346,12 +2346,12 @@ static PyObject * _ssl_SSLSession_timeout_get_impl(PySSLSession *self); static PyObject * -_ssl_SSLSession_timeout_get(PySSLSession *self, void *Py_UNUSED(context)) +_ssl_SSLSession_timeout_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl_SSLSession_timeout_get_impl(self); + return_value = _ssl_SSLSession_timeout_get_impl((PySSLSession *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -2378,12 +2378,12 @@ static PyObject * _ssl_SSLSession_ticket_lifetime_hint_get_impl(PySSLSession *self); static PyObject 
* -_ssl_SSLSession_ticket_lifetime_hint_get(PySSLSession *self, void *Py_UNUSED(context)) +_ssl_SSLSession_ticket_lifetime_hint_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl_SSLSession_ticket_lifetime_hint_get_impl(self); + return_value = _ssl_SSLSession_ticket_lifetime_hint_get_impl((PySSLSession *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -2410,12 +2410,12 @@ static PyObject * _ssl_SSLSession_id_get_impl(PySSLSession *self); static PyObject * -_ssl_SSLSession_id_get(PySSLSession *self, void *Py_UNUSED(context)) +_ssl_SSLSession_id_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl_SSLSession_id_get_impl(self); + return_value = _ssl_SSLSession_id_get_impl((PySSLSession *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -2442,12 +2442,12 @@ static PyObject * _ssl_SSLSession_has_ticket_get_impl(PySSLSession *self); static PyObject * -_ssl_SSLSession_has_ticket_get(PySSLSession *self, void *Py_UNUSED(context)) +_ssl_SSLSession_has_ticket_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl_SSLSession_has_ticket_get_impl(self); + return_value = _ssl_SSLSession_has_ticket_get_impl((PySSLSession *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -2878,4 +2878,4 @@ _ssl_enum_crls(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObje #ifndef _SSL_ENUM_CRLS_METHODDEF #define _SSL_ENUM_CRLS_METHODDEF #endif /* !defined(_SSL_ENUM_CRLS_METHODDEF) */ -/*[clinic end generated code: output=e71f1ef621aead08 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=bededfb2b927bd41 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_struct.c.h b/Modules/clinic/_struct.c.h index cfc2fe7fc1dd58..7cf179f7a69d55 100644 --- a/Modules/clinic/_struct.c.h +++ b/Modules/clinic/_struct.c.h @@ -87,7 +87,7 @@ static PyObject * Struct_unpack_impl(PyStructObject *self, Py_buffer *buffer); static PyObject * -Struct_unpack(PyStructObject *self, PyObject *arg) +Struct_unpack(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; Py_buffer buffer = {NULL, NULL}; @@ -95,7 +95,7 @@ Struct_unpack(PyStructObject *self, PyObject *arg) if (PyObject_GetBuffer(arg, &buffer, PyBUF_SIMPLE) != 0) { goto exit; } - return_value = Struct_unpack_impl(self, &buffer); + return_value = Struct_unpack_impl((PyStructObject *)self, &buffer); exit: /* Cleanup for buffer */ @@ -127,7 +127,7 @@ Struct_unpack_from_impl(PyStructObject *self, Py_buffer *buffer, Py_ssize_t offset); static PyObject * -Struct_unpack_from(PyStructObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +Struct_unpack_from(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -184,7 +184,7 @@ Struct_unpack_from(PyStructObject *self, PyObject *const *args, Py_ssize_t nargs offset = ival; } skip_optional_pos: - return_value = Struct_unpack_from_impl(self, &buffer, offset); + return_value = Struct_unpack_from_impl((PyStructObject *)self, &buffer, offset); exit: /* Cleanup for buffer */ @@ -439,4 +439,4 @@ iter_unpack(PyObject *module, PyObject *const *args, Py_ssize_t nargs) return return_value; } -/*[clinic end generated code: output=faff90f99c6bd09f input=a9049054013a1b77]*/ +/*[clinic end generated code: output=ec540c21be08e1d0 
input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_testmultiphase.c.h b/Modules/clinic/_testmultiphase.c.h index 5a432a6f70386d..01c29c0753ae13 100644 --- a/Modules/clinic/_testmultiphase.c.h +++ b/Modules/clinic/_testmultiphase.c.h @@ -25,13 +25,13 @@ _testmultiphase_StateAccessType_get_defining_module_impl(StateAccessTypeObject * PyTypeObject *cls); static PyObject * -_testmultiphase_StateAccessType_get_defining_module(StateAccessTypeObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_testmultiphase_StateAccessType_get_defining_module(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "get_defining_module() takes no arguments"); return NULL; } - return _testmultiphase_StateAccessType_get_defining_module_impl(self, cls); + return _testmultiphase_StateAccessType_get_defining_module_impl((StateAccessTypeObject *)self, cls); } PyDoc_STRVAR(_testmultiphase_StateAccessType_getmodulebydef_bad_def__doc__, @@ -48,13 +48,13 @@ _testmultiphase_StateAccessType_getmodulebydef_bad_def_impl(StateAccessTypeObjec PyTypeObject *cls); static PyObject * -_testmultiphase_StateAccessType_getmodulebydef_bad_def(StateAccessTypeObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_testmultiphase_StateAccessType_getmodulebydef_bad_def(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "getmodulebydef_bad_def() takes no arguments"); return NULL; } - return _testmultiphase_StateAccessType_getmodulebydef_bad_def_impl(self, cls); + return _testmultiphase_StateAccessType_getmodulebydef_bad_def_impl((StateAccessTypeObject *)self, cls); } PyDoc_STRVAR(_testmultiphase_StateAccessType_increment_count_clinic__doc__, @@ -76,7 +76,7 @@ _testmultiphase_StateAccessType_increment_count_clinic_impl(StateAccessTypeObjec int n, int twice); static PyObject * -_testmultiphase_StateAccessType_increment_count_clinic(StateAccessTypeObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_testmultiphase_StateAccessType_increment_count_clinic(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -135,7 +135,7 @@ _testmultiphase_StateAccessType_increment_count_clinic(StateAccessTypeObject *se goto exit; } skip_optional_kwonly: - return_value = _testmultiphase_StateAccessType_increment_count_clinic_impl(self, cls, n, twice); + return_value = _testmultiphase_StateAccessType_increment_count_clinic_impl((StateAccessTypeObject *)self, cls, n, twice); exit: return return_value; @@ -155,12 +155,12 @@ _testmultiphase_StateAccessType_get_count_impl(StateAccessTypeObject *self, PyTypeObject *cls); static PyObject * -_testmultiphase_StateAccessType_get_count(StateAccessTypeObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_testmultiphase_StateAccessType_get_count(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "get_count() takes no arguments"); return NULL; } - return _testmultiphase_StateAccessType_get_count_impl(self, cls); + return 
_testmultiphase_StateAccessType_get_count_impl((StateAccessTypeObject *)self, cls); } -/*[clinic end generated code: output=c1aa0af3572bf059 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=ea0ca98e467e53c2 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_threadmodule.c.h b/Modules/clinic/_threadmodule.c.h index 8f0507d40285b3..09b7afebd6d8d9 100644 --- a/Modules/clinic/_threadmodule.c.h +++ b/Modules/clinic/_threadmodule.c.h @@ -8,7 +8,7 @@ preserve #endif #include "pycore_modsupport.h" // _PyArg_UnpackKeywords() -#if defined(HAVE_PTHREAD_GETNAME_NP) +#if (defined(HAVE_PTHREAD_GETNAME_NP) || defined(MS_WINDOWS)) PyDoc_STRVAR(_thread__get_name__doc__, "_get_name($module, /)\n" @@ -28,9 +28,9 @@ _thread__get_name(PyObject *module, PyObject *Py_UNUSED(ignored)) return _thread__get_name_impl(module); } -#endif /* defined(HAVE_PTHREAD_GETNAME_NP) */ +#endif /* (defined(HAVE_PTHREAD_GETNAME_NP) || defined(MS_WINDOWS)) */ -#if defined(HAVE_PTHREAD_SETNAME_NP) +#if (defined(HAVE_PTHREAD_SETNAME_NP) || defined(MS_WINDOWS)) PyDoc_STRVAR(_thread_set_name__doc__, "set_name($module, /, name)\n" @@ -92,7 +92,7 @@ _thread_set_name(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyOb return return_value; } -#endif /* defined(HAVE_PTHREAD_SETNAME_NP) */ +#endif /* (defined(HAVE_PTHREAD_SETNAME_NP) || defined(MS_WINDOWS)) */ #ifndef _THREAD__GET_NAME_METHODDEF #define _THREAD__GET_NAME_METHODDEF @@ -101,4 +101,4 @@ _thread_set_name(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyOb #ifndef _THREAD_SET_NAME_METHODDEF #define _THREAD_SET_NAME_METHODDEF #endif /* !defined(_THREAD_SET_NAME_METHODDEF) */ -/*[clinic end generated code: output=b5cb85aaccc45bf6 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=6e88ef6b126cece8 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_tkinter.c.h b/Modules/clinic/_tkinter.c.h index 2b1ac954b4d570..d6e783b04fe968 100644 --- a/Modules/clinic/_tkinter.c.h +++ b/Modules/clinic/_tkinter.c.h @@ -16,7 +16,7 @@ static PyObject * _tkinter_tkapp_eval_impl(TkappObject *self, const char *script); static PyObject * -_tkinter_tkapp_eval(TkappObject *self, PyObject *arg) +_tkinter_tkapp_eval(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; const char *script; @@ -34,7 +34,7 @@ _tkinter_tkapp_eval(TkappObject *self, PyObject *arg) PyErr_SetString(PyExc_ValueError, "embedded null character"); goto exit; } - return_value = _tkinter_tkapp_eval_impl(self, script); + return_value = _tkinter_tkapp_eval_impl((TkappObject *)self, script); exit: return return_value; @@ -52,7 +52,7 @@ static PyObject * _tkinter_tkapp_evalfile_impl(TkappObject *self, const char *fileName); static PyObject * -_tkinter_tkapp_evalfile(TkappObject *self, PyObject *arg) +_tkinter_tkapp_evalfile(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; const char *fileName; @@ -70,7 +70,7 @@ _tkinter_tkapp_evalfile(TkappObject *self, PyObject *arg) PyErr_SetString(PyExc_ValueError, "embedded null character"); goto exit; } - return_value = _tkinter_tkapp_evalfile_impl(self, fileName); + return_value = _tkinter_tkapp_evalfile_impl((TkappObject *)self, fileName); exit: return return_value; @@ -88,7 +88,7 @@ static PyObject * _tkinter_tkapp_record_impl(TkappObject *self, const char *script); static PyObject * -_tkinter_tkapp_record(TkappObject *self, PyObject *arg) +_tkinter_tkapp_record(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; const char *script; @@ -106,7 +106,7 @@ _tkinter_tkapp_record(TkappObject *self, PyObject 
*arg) PyErr_SetString(PyExc_ValueError, "embedded null character"); goto exit; } - return_value = _tkinter_tkapp_record_impl(self, script); + return_value = _tkinter_tkapp_record_impl((TkappObject *)self, script); exit: return return_value; @@ -124,7 +124,7 @@ static PyObject * _tkinter_tkapp_adderrorinfo_impl(TkappObject *self, const char *msg); static PyObject * -_tkinter_tkapp_adderrorinfo(TkappObject *self, PyObject *arg) +_tkinter_tkapp_adderrorinfo(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; const char *msg; @@ -142,7 +142,7 @@ _tkinter_tkapp_adderrorinfo(TkappObject *self, PyObject *arg) PyErr_SetString(PyExc_ValueError, "embedded null character"); goto exit; } - return_value = _tkinter_tkapp_adderrorinfo_impl(self, msg); + return_value = _tkinter_tkapp_adderrorinfo_impl((TkappObject *)self, msg); exit: return return_value; @@ -184,7 +184,7 @@ static PyObject * _tkinter_tkapp_exprstring_impl(TkappObject *self, const char *s); static PyObject * -_tkinter_tkapp_exprstring(TkappObject *self, PyObject *arg) +_tkinter_tkapp_exprstring(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; const char *s; @@ -202,7 +202,7 @@ _tkinter_tkapp_exprstring(TkappObject *self, PyObject *arg) PyErr_SetString(PyExc_ValueError, "embedded null character"); goto exit; } - return_value = _tkinter_tkapp_exprstring_impl(self, s); + return_value = _tkinter_tkapp_exprstring_impl((TkappObject *)self, s); exit: return return_value; @@ -220,7 +220,7 @@ static PyObject * _tkinter_tkapp_exprlong_impl(TkappObject *self, const char *s); static PyObject * -_tkinter_tkapp_exprlong(TkappObject *self, PyObject *arg) +_tkinter_tkapp_exprlong(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; const char *s; @@ -238,7 +238,7 @@ _tkinter_tkapp_exprlong(TkappObject *self, PyObject *arg) PyErr_SetString(PyExc_ValueError, "embedded null character"); goto exit; } - return_value = _tkinter_tkapp_exprlong_impl(self, s); + return_value = _tkinter_tkapp_exprlong_impl((TkappObject *)self, s); exit: return return_value; @@ -256,7 +256,7 @@ static PyObject * _tkinter_tkapp_exprdouble_impl(TkappObject *self, const char *s); static PyObject * -_tkinter_tkapp_exprdouble(TkappObject *self, PyObject *arg) +_tkinter_tkapp_exprdouble(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; const char *s; @@ -274,7 +274,7 @@ _tkinter_tkapp_exprdouble(TkappObject *self, PyObject *arg) PyErr_SetString(PyExc_ValueError, "embedded null character"); goto exit; } - return_value = _tkinter_tkapp_exprdouble_impl(self, s); + return_value = _tkinter_tkapp_exprdouble_impl((TkappObject *)self, s); exit: return return_value; @@ -292,7 +292,7 @@ static PyObject * _tkinter_tkapp_exprboolean_impl(TkappObject *self, const char *s); static PyObject * -_tkinter_tkapp_exprboolean(TkappObject *self, PyObject *arg) +_tkinter_tkapp_exprboolean(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; const char *s; @@ -310,7 +310,7 @@ _tkinter_tkapp_exprboolean(TkappObject *self, PyObject *arg) PyErr_SetString(PyExc_ValueError, "embedded null character"); goto exit; } - return_value = _tkinter_tkapp_exprboolean_impl(self, s); + return_value = _tkinter_tkapp_exprboolean_impl((TkappObject *)self, s); exit: return return_value; @@ -337,7 +337,7 @@ _tkinter_tkapp_createcommand_impl(TkappObject *self, const char *name, PyObject *func); static PyObject * -_tkinter_tkapp_createcommand(TkappObject *self, PyObject *const *args, Py_ssize_t nargs) +_tkinter_tkapp_createcommand(PyObject *self, PyObject *const *args, 
Py_ssize_t nargs) { PyObject *return_value = NULL; const char *name; @@ -360,7 +360,7 @@ _tkinter_tkapp_createcommand(TkappObject *self, PyObject *const *args, Py_ssize_ goto exit; } func = args[1]; - return_value = _tkinter_tkapp_createcommand_impl(self, name, func); + return_value = _tkinter_tkapp_createcommand_impl((TkappObject *)self, name, func); exit: return return_value; @@ -378,7 +378,7 @@ static PyObject * _tkinter_tkapp_deletecommand_impl(TkappObject *self, const char *name); static PyObject * -_tkinter_tkapp_deletecommand(TkappObject *self, PyObject *arg) +_tkinter_tkapp_deletecommand(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; const char *name; @@ -396,7 +396,7 @@ _tkinter_tkapp_deletecommand(TkappObject *self, PyObject *arg) PyErr_SetString(PyExc_ValueError, "embedded null character"); goto exit; } - return_value = _tkinter_tkapp_deletecommand_impl(self, name); + return_value = _tkinter_tkapp_deletecommand_impl((TkappObject *)self, name); exit: return return_value; @@ -417,7 +417,7 @@ _tkinter_tkapp_createfilehandler_impl(TkappObject *self, PyObject *file, int mask, PyObject *func); static PyObject * -_tkinter_tkapp_createfilehandler(TkappObject *self, PyObject *const *args, Py_ssize_t nargs) +_tkinter_tkapp_createfilehandler(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *file; @@ -433,7 +433,7 @@ _tkinter_tkapp_createfilehandler(TkappObject *self, PyObject *const *args, Py_ss goto exit; } func = args[2]; - return_value = _tkinter_tkapp_createfilehandler_impl(self, file, mask, func); + return_value = _tkinter_tkapp_createfilehandler_impl((TkappObject *)self, file, mask, func); exit: return return_value; @@ -465,9 +465,9 @@ static PyObject * _tkinter_tktimertoken_deletetimerhandler_impl(TkttObject *self); static PyObject * -_tkinter_tktimertoken_deletetimerhandler(TkttObject *self, PyObject *Py_UNUSED(ignored)) +_tkinter_tktimertoken_deletetimerhandler(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _tkinter_tktimertoken_deletetimerhandler_impl(self); + return _tkinter_tktimertoken_deletetimerhandler_impl((TkttObject *)self); } PyDoc_STRVAR(_tkinter_tkapp_createtimerhandler__doc__, @@ -483,7 +483,7 @@ _tkinter_tkapp_createtimerhandler_impl(TkappObject *self, int milliseconds, PyObject *func); static PyObject * -_tkinter_tkapp_createtimerhandler(TkappObject *self, PyObject *const *args, Py_ssize_t nargs) +_tkinter_tkapp_createtimerhandler(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; int milliseconds; @@ -497,7 +497,7 @@ _tkinter_tkapp_createtimerhandler(TkappObject *self, PyObject *const *args, Py_s goto exit; } func = args[1]; - return_value = _tkinter_tkapp_createtimerhandler_impl(self, milliseconds, func); + return_value = _tkinter_tkapp_createtimerhandler_impl((TkappObject *)self, milliseconds, func); exit: return return_value; @@ -515,7 +515,7 @@ static PyObject * _tkinter_tkapp_mainloop_impl(TkappObject *self, int threshold); static PyObject * -_tkinter_tkapp_mainloop(TkappObject *self, PyObject *const *args, Py_ssize_t nargs) +_tkinter_tkapp_mainloop(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; int threshold = 0; @@ -531,7 +531,7 @@ _tkinter_tkapp_mainloop(TkappObject *self, PyObject *const *args, Py_ssize_t nar goto exit; } skip_optional: - return_value = _tkinter_tkapp_mainloop_impl(self, threshold); + return_value = _tkinter_tkapp_mainloop_impl((TkappObject *)self, threshold); exit: return 
return_value; @@ -549,7 +549,7 @@ static PyObject * _tkinter_tkapp_dooneevent_impl(TkappObject *self, int flags); static PyObject * -_tkinter_tkapp_dooneevent(TkappObject *self, PyObject *const *args, Py_ssize_t nargs) +_tkinter_tkapp_dooneevent(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; int flags = 0; @@ -565,7 +565,7 @@ _tkinter_tkapp_dooneevent(TkappObject *self, PyObject *const *args, Py_ssize_t n goto exit; } skip_optional: - return_value = _tkinter_tkapp_dooneevent_impl(self, flags); + return_value = _tkinter_tkapp_dooneevent_impl((TkappObject *)self, flags); exit: return return_value; @@ -583,9 +583,9 @@ static PyObject * _tkinter_tkapp_quit_impl(TkappObject *self); static PyObject * -_tkinter_tkapp_quit(TkappObject *self, PyObject *Py_UNUSED(ignored)) +_tkinter_tkapp_quit(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _tkinter_tkapp_quit_impl(self); + return _tkinter_tkapp_quit_impl((TkappObject *)self); } PyDoc_STRVAR(_tkinter_tkapp_interpaddr__doc__, @@ -600,9 +600,9 @@ static PyObject * _tkinter_tkapp_interpaddr_impl(TkappObject *self); static PyObject * -_tkinter_tkapp_interpaddr(TkappObject *self, PyObject *Py_UNUSED(ignored)) +_tkinter_tkapp_interpaddr(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _tkinter_tkapp_interpaddr_impl(self); + return _tkinter_tkapp_interpaddr_impl((TkappObject *)self); } PyDoc_STRVAR(_tkinter_tkapp_loadtk__doc__, @@ -617,9 +617,9 @@ static PyObject * _tkinter_tkapp_loadtk_impl(TkappObject *self); static PyObject * -_tkinter_tkapp_loadtk(TkappObject *self, PyObject *Py_UNUSED(ignored)) +_tkinter_tkapp_loadtk(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _tkinter_tkapp_loadtk_impl(self); + return _tkinter_tkapp_loadtk_impl((TkappObject *)self); } PyDoc_STRVAR(_tkinter_tkapp_settrace__doc__, @@ -644,9 +644,9 @@ static PyObject * _tkinter_tkapp_gettrace_impl(TkappObject *self); static PyObject * -_tkinter_tkapp_gettrace(TkappObject *self, PyObject *Py_UNUSED(ignored)) +_tkinter_tkapp_gettrace(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _tkinter_tkapp_gettrace_impl(self); + return _tkinter_tkapp_gettrace_impl((TkappObject *)self); } PyDoc_STRVAR(_tkinter_tkapp_willdispatch__doc__, @@ -661,9 +661,9 @@ static PyObject * _tkinter_tkapp_willdispatch_impl(TkappObject *self); static PyObject * -_tkinter_tkapp_willdispatch(TkappObject *self, PyObject *Py_UNUSED(ignored)) +_tkinter_tkapp_willdispatch(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _tkinter_tkapp_willdispatch_impl(self); + return _tkinter_tkapp_willdispatch_impl((TkappObject *)self); } PyDoc_STRVAR(_tkinter__flatten__doc__, @@ -888,4 +888,4 @@ _tkinter_getbusywaitinterval(PyObject *module, PyObject *Py_UNUSED(ignored)) #ifndef _TKINTER_TKAPP_DELETEFILEHANDLER_METHODDEF #define _TKINTER_TKAPP_DELETEFILEHANDLER_METHODDEF #endif /* !defined(_TKINTER_TKAPP_DELETEFILEHANDLER_METHODDEF) */ -/*[clinic end generated code: output=d90c1a9850c63249 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=172a98df5f209a84 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_winapi.c.h b/Modules/clinic/_winapi.c.h index 8bbecc44dc9c11..6a2f8d45cd4e0c 100644 --- a/Modules/clinic/_winapi.c.h +++ b/Modules/clinic/_winapi.c.h @@ -21,7 +21,7 @@ static PyObject * _winapi_Overlapped_GetOverlappedResult_impl(OverlappedObject *self, int wait); static PyObject * -_winapi_Overlapped_GetOverlappedResult(OverlappedObject *self, PyObject *arg) +_winapi_Overlapped_GetOverlappedResult(PyObject *self, PyObject *arg) { 
PyObject *return_value = NULL; int wait; @@ -30,7 +30,7 @@ _winapi_Overlapped_GetOverlappedResult(OverlappedObject *self, PyObject *arg) if (wait < 0) { goto exit; } - return_value = _winapi_Overlapped_GetOverlappedResult_impl(self, wait); + return_value = _winapi_Overlapped_GetOverlappedResult_impl((OverlappedObject *)self, wait); exit: return return_value; @@ -48,9 +48,9 @@ static PyObject * _winapi_Overlapped_getbuffer_impl(OverlappedObject *self); static PyObject * -_winapi_Overlapped_getbuffer(OverlappedObject *self, PyObject *Py_UNUSED(ignored)) +_winapi_Overlapped_getbuffer(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _winapi_Overlapped_getbuffer_impl(self); + return _winapi_Overlapped_getbuffer_impl((OverlappedObject *)self); } PyDoc_STRVAR(_winapi_Overlapped_cancel__doc__, @@ -65,9 +65,9 @@ static PyObject * _winapi_Overlapped_cancel_impl(OverlappedObject *self); static PyObject * -_winapi_Overlapped_cancel(OverlappedObject *self, PyObject *Py_UNUSED(ignored)) +_winapi_Overlapped_cancel(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _winapi_Overlapped_cancel_impl(self); + return _winapi_Overlapped_cancel_impl((OverlappedObject *)self); } PyDoc_STRVAR(_winapi_CloseHandle__doc__, @@ -2127,4 +2127,4 @@ _winapi_CopyFile2(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyO return return_value; } -/*[clinic end generated code: output=b2a178bde6868e88 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=06b56212b2186250 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/arraymodule.c.h b/Modules/clinic/arraymodule.c.h index 4a7266ecb8b84f..c5b62b16699d06 100644 --- a/Modules/clinic/arraymodule.c.h +++ b/Modules/clinic/arraymodule.c.h @@ -21,9 +21,9 @@ static PyObject * array_array_clear_impl(arrayobject *self); static PyObject * -array_array_clear(arrayobject *self, PyObject *Py_UNUSED(ignored)) +array_array_clear(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return array_array_clear_impl(self); + return array_array_clear_impl((arrayobject *)self); } PyDoc_STRVAR(array_array___copy____doc__, @@ -39,9 +39,9 @@ static PyObject * array_array___copy___impl(arrayobject *self); static PyObject * -array_array___copy__(arrayobject *self, PyObject *Py_UNUSED(ignored)) +array_array___copy__(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return array_array___copy___impl(self); + return array_array___copy___impl((arrayobject *)self); } PyDoc_STRVAR(array_array___deepcopy____doc__, @@ -78,7 +78,7 @@ array_array_index_impl(arrayobject *self, PyObject *v, Py_ssize_t start, Py_ssize_t stop); static PyObject * -array_array_index(arrayobject *self, PyObject *const *args, Py_ssize_t nargs) +array_array_index(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *v; @@ -102,7 +102,7 @@ array_array_index(arrayobject *self, PyObject *const *args, Py_ssize_t nargs) goto exit; } skip_optional: - return_value = array_array_index_impl(self, v, start, stop); + return_value = array_array_index_impl((arrayobject *)self, v, start, stop); exit: return return_value; @@ -132,7 +132,7 @@ static PyObject * array_array_pop_impl(arrayobject *self, Py_ssize_t i); static PyObject * -array_array_pop(arrayobject *self, PyObject *const *args, Py_ssize_t nargs) +array_array_pop(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; Py_ssize_t i = -1; @@ -156,7 +156,7 @@ array_array_pop(arrayobject *self, PyObject *const *args, Py_ssize_t nargs) i = ival; } skip_optional: - return_value = 
array_array_pop_impl(self, i); + return_value = array_array_pop_impl((arrayobject *)self, i); exit: return return_value; @@ -175,7 +175,7 @@ static PyObject * array_array_extend_impl(arrayobject *self, PyTypeObject *cls, PyObject *bb); static PyObject * -array_array_extend(arrayobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +array_array_extend(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -200,7 +200,7 @@ array_array_extend(arrayobject *self, PyTypeObject *cls, PyObject *const *args, goto exit; } bb = args[0]; - return_value = array_array_extend_impl(self, cls, bb); + return_value = array_array_extend_impl((arrayobject *)self, cls, bb); exit: return return_value; @@ -219,7 +219,7 @@ static PyObject * array_array_insert_impl(arrayobject *self, Py_ssize_t i, PyObject *v); static PyObject * -array_array_insert(arrayobject *self, PyObject *const *args, Py_ssize_t nargs) +array_array_insert(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; Py_ssize_t i; @@ -241,7 +241,7 @@ array_array_insert(arrayobject *self, PyObject *const *args, Py_ssize_t nargs) i = ival; } v = args[1]; - return_value = array_array_insert_impl(self, i, v); + return_value = array_array_insert_impl((arrayobject *)self, i, v); exit: return return_value; @@ -263,9 +263,9 @@ static PyObject * array_array_buffer_info_impl(arrayobject *self); static PyObject * -array_array_buffer_info(arrayobject *self, PyObject *Py_UNUSED(ignored)) +array_array_buffer_info(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return array_array_buffer_info_impl(self); + return array_array_buffer_info_impl((arrayobject *)self); } PyDoc_STRVAR(array_array_append__doc__, @@ -293,9 +293,9 @@ static PyObject * array_array_byteswap_impl(arrayobject *self); static PyObject * -array_array_byteswap(arrayobject *self, PyObject *Py_UNUSED(ignored)) +array_array_byteswap(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return array_array_byteswap_impl(self); + return array_array_byteswap_impl((arrayobject *)self); } PyDoc_STRVAR(array_array_reverse__doc__, @@ -311,9 +311,9 @@ static PyObject * array_array_reverse_impl(arrayobject *self); static PyObject * -array_array_reverse(arrayobject *self, PyObject *Py_UNUSED(ignored)) +array_array_reverse(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return array_array_reverse_impl(self); + return array_array_reverse_impl((arrayobject *)self); } PyDoc_STRVAR(array_array_fromfile__doc__, @@ -330,7 +330,7 @@ array_array_fromfile_impl(arrayobject *self, PyTypeObject *cls, PyObject *f, Py_ssize_t n); static PyObject * -array_array_fromfile(arrayobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +array_array_fromfile(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -368,7 +368,7 @@ array_array_fromfile(arrayobject *self, PyTypeObject *cls, PyObject *const *args } n = ival; } - return_value = array_array_fromfile_impl(self, cls, f, n); + return_value = array_array_fromfile_impl((arrayobject *)self, cls, f, n); exit: return return_value; @@ -387,7 +387,7 @@ static PyObject * array_array_tofile_impl(arrayobject *self, PyTypeObject *cls, PyObject *f); static PyObject * -array_array_tofile(arrayobject *self, 
PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +array_array_tofile(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -412,7 +412,7 @@ array_array_tofile(arrayobject *self, PyTypeObject *cls, PyObject *const *args, goto exit; } f = args[0]; - return_value = array_array_tofile_impl(self, cls, f); + return_value = array_array_tofile_impl((arrayobject *)self, cls, f); exit: return return_value; @@ -440,9 +440,9 @@ static PyObject * array_array_tolist_impl(arrayobject *self); static PyObject * -array_array_tolist(arrayobject *self, PyObject *Py_UNUSED(ignored)) +array_array_tolist(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return array_array_tolist_impl(self); + return array_array_tolist_impl((arrayobject *)self); } PyDoc_STRVAR(array_array_frombytes__doc__, @@ -458,7 +458,7 @@ static PyObject * array_array_frombytes_impl(arrayobject *self, Py_buffer *buffer); static PyObject * -array_array_frombytes(arrayobject *self, PyObject *arg) +array_array_frombytes(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; Py_buffer buffer = {NULL, NULL}; @@ -466,7 +466,7 @@ array_array_frombytes(arrayobject *self, PyObject *arg) if (PyObject_GetBuffer(arg, &buffer, PyBUF_SIMPLE) != 0) { goto exit; } - return_value = array_array_frombytes_impl(self, &buffer); + return_value = array_array_frombytes_impl((arrayobject *)self, &buffer); exit: /* Cleanup for buffer */ @@ -490,9 +490,9 @@ static PyObject * array_array_tobytes_impl(arrayobject *self); static PyObject * -array_array_tobytes(arrayobject *self, PyObject *Py_UNUSED(ignored)) +array_array_tobytes(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return array_array_tobytes_impl(self); + return array_array_tobytes_impl((arrayobject *)self); } PyDoc_STRVAR(array_array_fromunicode__doc__, @@ -512,7 +512,7 @@ static PyObject * array_array_fromunicode_impl(arrayobject *self, PyObject *ustr); static PyObject * -array_array_fromunicode(arrayobject *self, PyObject *arg) +array_array_fromunicode(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; PyObject *ustr; @@ -522,7 +522,7 @@ array_array_fromunicode(arrayobject *self, PyObject *arg) goto exit; } ustr = arg; - return_value = array_array_fromunicode_impl(self, ustr); + return_value = array_array_fromunicode_impl((arrayobject *)self, ustr); exit: return return_value; @@ -545,9 +545,9 @@ static PyObject * array_array_tounicode_impl(arrayobject *self); static PyObject * -array_array_tounicode(arrayobject *self, PyObject *Py_UNUSED(ignored)) +array_array_tounicode(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return array_array_tounicode_impl(self); + return array_array_tounicode_impl((arrayobject *)self); } PyDoc_STRVAR(array_array___sizeof____doc__, @@ -563,9 +563,9 @@ static PyObject * array_array___sizeof___impl(arrayobject *self); static PyObject * -array_array___sizeof__(arrayobject *self, PyObject *Py_UNUSED(ignored)) +array_array___sizeof__(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return array_array___sizeof___impl(self); + return array_array___sizeof___impl((arrayobject *)self); } PyDoc_STRVAR(array__array_reconstructor__doc__, @@ -634,7 +634,7 @@ array_array___reduce_ex___impl(arrayobject *self, PyTypeObject *cls, PyObject *value); static PyObject * -array_array___reduce_ex__(arrayobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) 
+array_array___reduce_ex__(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -659,7 +659,7 @@ array_array___reduce_ex__(arrayobject *self, PyTypeObject *cls, PyObject *const goto exit; } value = args[0]; - return_value = array_array___reduce_ex___impl(self, cls, value); + return_value = array_array___reduce_ex___impl((arrayobject *)self, cls, value); exit: return return_value; @@ -678,13 +678,13 @@ static PyObject * array_arrayiterator___reduce___impl(arrayiterobject *self, PyTypeObject *cls); static PyObject * -array_arrayiterator___reduce__(arrayiterobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +array_arrayiterator___reduce__(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "__reduce__() takes no arguments"); return NULL; } - return array_arrayiterator___reduce___impl(self, cls); + return array_arrayiterator___reduce___impl((arrayiterobject *)self, cls); } PyDoc_STRVAR(array_arrayiterator___setstate____doc__, @@ -695,4 +695,4 @@ PyDoc_STRVAR(array_arrayiterator___setstate____doc__, #define ARRAY_ARRAYITERATOR___SETSTATE___METHODDEF \ {"__setstate__", (PyCFunction)array_arrayiterator___setstate__, METH_O, array_arrayiterator___setstate____doc__}, -/*[clinic end generated code: output=22dbe12826bfa86f input=a9049054013a1b77]*/ +/*[clinic end generated code: output=8120dc5c4fa414b9 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/blake2module.c.h b/Modules/clinic/blake2module.c.h index f695f27e9e6c42..b5ac90143a1740 100644 --- a/Modules/clinic/blake2module.c.h +++ b/Modules/clinic/blake2module.c.h @@ -412,9 +412,9 @@ static PyObject * _blake2_blake2b_copy_impl(Blake2Object *self); static PyObject * -_blake2_blake2b_copy(Blake2Object *self, PyObject *Py_UNUSED(ignored)) +_blake2_blake2b_copy(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _blake2_blake2b_copy_impl(self); + return _blake2_blake2b_copy_impl((Blake2Object *)self); } PyDoc_STRVAR(_blake2_blake2b_update__doc__, @@ -439,9 +439,9 @@ static PyObject * _blake2_blake2b_digest_impl(Blake2Object *self); static PyObject * -_blake2_blake2b_digest(Blake2Object *self, PyObject *Py_UNUSED(ignored)) +_blake2_blake2b_digest(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _blake2_blake2b_digest_impl(self); + return _blake2_blake2b_digest_impl((Blake2Object *)self); } PyDoc_STRVAR(_blake2_blake2b_hexdigest__doc__, @@ -457,8 +457,8 @@ static PyObject * _blake2_blake2b_hexdigest_impl(Blake2Object *self); static PyObject * -_blake2_blake2b_hexdigest(Blake2Object *self, PyObject *Py_UNUSED(ignored)) +_blake2_blake2b_hexdigest(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _blake2_blake2b_hexdigest_impl(self); + return _blake2_blake2b_hexdigest_impl((Blake2Object *)self); } -/*[clinic end generated code: output=e0aaaf112d023b79 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=6e03c947b7e0d973 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/md5module.c.h b/Modules/clinic/md5module.c.h index 7721616862ed0d..1f0acebf47b6ff 100644 --- a/Modules/clinic/md5module.c.h +++ b/Modules/clinic/md5module.c.h @@ -21,13 +21,13 @@ static PyObject * MD5Type_copy_impl(MD5object *self, PyTypeObject *cls); static PyObject * -MD5Type_copy(MD5object *self, PyTypeObject *cls, PyObject *const *args, 
Py_ssize_t nargs, PyObject *kwnames) +MD5Type_copy(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "copy() takes no arguments"); return NULL; } - return MD5Type_copy_impl(self, cls); + return MD5Type_copy_impl((MD5object *)self, cls); } PyDoc_STRVAR(MD5Type_digest__doc__, @@ -43,9 +43,9 @@ static PyObject * MD5Type_digest_impl(MD5object *self); static PyObject * -MD5Type_digest(MD5object *self, PyObject *Py_UNUSED(ignored)) +MD5Type_digest(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return MD5Type_digest_impl(self); + return MD5Type_digest_impl((MD5object *)self); } PyDoc_STRVAR(MD5Type_hexdigest__doc__, @@ -61,9 +61,9 @@ static PyObject * MD5Type_hexdigest_impl(MD5object *self); static PyObject * -MD5Type_hexdigest(MD5object *self, PyObject *Py_UNUSED(ignored)) +MD5Type_hexdigest(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return MD5Type_hexdigest_impl(self); + return MD5Type_hexdigest_impl((MD5object *)self); } PyDoc_STRVAR(MD5Type_update__doc__, @@ -149,4 +149,4 @@ _md5_md5(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kw exit: return return_value; } -/*[clinic end generated code: output=62ebf28802ae8b5f input=a9049054013a1b77]*/ +/*[clinic end generated code: output=a4292eab710dcb60 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/overlapped.c.h b/Modules/clinic/overlapped.c.h index 9d5adb5193f297..7e5715660022c1 100644 --- a/Modules/clinic/overlapped.c.h +++ b/Modules/clinic/overlapped.c.h @@ -516,9 +516,9 @@ static PyObject * _overlapped_Overlapped_cancel_impl(OverlappedObject *self); static PyObject * -_overlapped_Overlapped_cancel(OverlappedObject *self, PyObject *Py_UNUSED(ignored)) +_overlapped_Overlapped_cancel(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _overlapped_Overlapped_cancel_impl(self); + return _overlapped_Overlapped_cancel_impl((OverlappedObject *)self); } PyDoc_STRVAR(_overlapped_Overlapped_getresult__doc__, @@ -537,7 +537,7 @@ static PyObject * _overlapped_Overlapped_getresult_impl(OverlappedObject *self, BOOL wait); static PyObject * -_overlapped_Overlapped_getresult(OverlappedObject *self, PyObject *const *args, Py_ssize_t nargs) +_overlapped_Overlapped_getresult(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; BOOL wait = FALSE; @@ -553,7 +553,7 @@ _overlapped_Overlapped_getresult(OverlappedObject *self, PyObject *const *args, goto exit; } skip_optional: - return_value = _overlapped_Overlapped_getresult_impl(self, wait); + return_value = _overlapped_Overlapped_getresult_impl((OverlappedObject *)self, wait); exit: return return_value; @@ -573,7 +573,7 @@ _overlapped_Overlapped_ReadFile_impl(OverlappedObject *self, HANDLE handle, DWORD size); static PyObject * -_overlapped_Overlapped_ReadFile(OverlappedObject *self, PyObject *const *args, Py_ssize_t nargs) +_overlapped_Overlapped_ReadFile(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; HANDLE handle; @@ -589,7 +589,7 @@ _overlapped_Overlapped_ReadFile(OverlappedObject *self, PyObject *const *args, P if (!_PyLong_UnsignedLong_Converter(args[1], &size)) { goto exit; } - return_value = _overlapped_Overlapped_ReadFile_impl(self, handle, size); + return_value = _overlapped_Overlapped_ReadFile_impl((OverlappedObject *)self, handle, size); exit: return return_value; @@ -609,7 +609,7 @@ _overlapped_Overlapped_ReadFileInto_impl(OverlappedObject *self, 
HANDLE handle, Py_buffer *bufobj); static PyObject * -_overlapped_Overlapped_ReadFileInto(OverlappedObject *self, PyObject *const *args, Py_ssize_t nargs) +_overlapped_Overlapped_ReadFileInto(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; HANDLE handle; @@ -625,7 +625,7 @@ _overlapped_Overlapped_ReadFileInto(OverlappedObject *self, PyObject *const *arg if (PyObject_GetBuffer(args[1], &bufobj, PyBUF_SIMPLE) != 0) { goto exit; } - return_value = _overlapped_Overlapped_ReadFileInto_impl(self, handle, &bufobj); + return_value = _overlapped_Overlapped_ReadFileInto_impl((OverlappedObject *)self, handle, &bufobj); exit: /* Cleanup for bufobj */ @@ -650,7 +650,7 @@ _overlapped_Overlapped_WSARecv_impl(OverlappedObject *self, HANDLE handle, DWORD size, DWORD flags); static PyObject * -_overlapped_Overlapped_WSARecv(OverlappedObject *self, PyObject *const *args, Py_ssize_t nargs) +_overlapped_Overlapped_WSARecv(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; HANDLE handle; @@ -674,7 +674,7 @@ _overlapped_Overlapped_WSARecv(OverlappedObject *self, PyObject *const *args, Py goto exit; } skip_optional: - return_value = _overlapped_Overlapped_WSARecv_impl(self, handle, size, flags); + return_value = _overlapped_Overlapped_WSARecv_impl((OverlappedObject *)self, handle, size, flags); exit: return return_value; @@ -695,7 +695,7 @@ _overlapped_Overlapped_WSARecvInto_impl(OverlappedObject *self, DWORD flags); static PyObject * -_overlapped_Overlapped_WSARecvInto(OverlappedObject *self, PyObject *const *args, Py_ssize_t nargs) +_overlapped_Overlapped_WSARecvInto(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; HANDLE handle; @@ -715,7 +715,7 @@ _overlapped_Overlapped_WSARecvInto(OverlappedObject *self, PyObject *const *args if (!_PyLong_UnsignedLong_Converter(args[2], &flags)) { goto exit; } - return_value = _overlapped_Overlapped_WSARecvInto_impl(self, handle, &bufobj, flags); + return_value = _overlapped_Overlapped_WSARecvInto_impl((OverlappedObject *)self, handle, &bufobj, flags); exit: /* Cleanup for bufobj */ @@ -740,7 +740,7 @@ _overlapped_Overlapped_WriteFile_impl(OverlappedObject *self, HANDLE handle, Py_buffer *bufobj); static PyObject * -_overlapped_Overlapped_WriteFile(OverlappedObject *self, PyObject *const *args, Py_ssize_t nargs) +_overlapped_Overlapped_WriteFile(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; HANDLE handle; @@ -756,7 +756,7 @@ _overlapped_Overlapped_WriteFile(OverlappedObject *self, PyObject *const *args, if (PyObject_GetBuffer(args[1], &bufobj, PyBUF_SIMPLE) != 0) { goto exit; } - return_value = _overlapped_Overlapped_WriteFile_impl(self, handle, &bufobj); + return_value = _overlapped_Overlapped_WriteFile_impl((OverlappedObject *)self, handle, &bufobj); exit: /* Cleanup for bufobj */ @@ -781,7 +781,7 @@ _overlapped_Overlapped_WSASend_impl(OverlappedObject *self, HANDLE handle, Py_buffer *bufobj, DWORD flags); static PyObject * -_overlapped_Overlapped_WSASend(OverlappedObject *self, PyObject *const *args, Py_ssize_t nargs) +_overlapped_Overlapped_WSASend(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; HANDLE handle; @@ -801,7 +801,7 @@ _overlapped_Overlapped_WSASend(OverlappedObject *self, PyObject *const *args, Py if (!_PyLong_UnsignedLong_Converter(args[2], &flags)) { goto exit; } - return_value = _overlapped_Overlapped_WSASend_impl(self, handle, &bufobj, flags); + 
return_value = _overlapped_Overlapped_WSASend_impl((OverlappedObject *)self, handle, &bufobj, flags); exit: /* Cleanup for bufobj */ @@ -827,7 +827,7 @@ _overlapped_Overlapped_AcceptEx_impl(OverlappedObject *self, HANDLE AcceptSocket); static PyObject * -_overlapped_Overlapped_AcceptEx(OverlappedObject *self, PyObject *const *args, Py_ssize_t nargs) +_overlapped_Overlapped_AcceptEx(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; HANDLE ListenSocket; @@ -844,7 +844,7 @@ _overlapped_Overlapped_AcceptEx(OverlappedObject *self, PyObject *const *args, P if (!AcceptSocket && PyErr_Occurred()) { goto exit; } - return_value = _overlapped_Overlapped_AcceptEx_impl(self, ListenSocket, AcceptSocket); + return_value = _overlapped_Overlapped_AcceptEx_impl((OverlappedObject *)self, ListenSocket, AcceptSocket); exit: return return_value; @@ -867,7 +867,7 @@ _overlapped_Overlapped_ConnectEx_impl(OverlappedObject *self, PyObject *AddressObj); static PyObject * -_overlapped_Overlapped_ConnectEx(OverlappedObject *self, PyObject *const *args, Py_ssize_t nargs) +_overlapped_Overlapped_ConnectEx(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; HANDLE ConnectSocket; @@ -885,7 +885,7 @@ _overlapped_Overlapped_ConnectEx(OverlappedObject *self, PyObject *const *args, goto exit; } AddressObj = args[1]; - return_value = _overlapped_Overlapped_ConnectEx_impl(self, ConnectSocket, AddressObj); + return_value = _overlapped_Overlapped_ConnectEx_impl((OverlappedObject *)self, ConnectSocket, AddressObj); exit: return return_value; @@ -904,7 +904,7 @@ _overlapped_Overlapped_DisconnectEx_impl(OverlappedObject *self, HANDLE Socket, DWORD flags); static PyObject * -_overlapped_Overlapped_DisconnectEx(OverlappedObject *self, PyObject *const *args, Py_ssize_t nargs) +_overlapped_Overlapped_DisconnectEx(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; HANDLE Socket; @@ -920,7 +920,7 @@ _overlapped_Overlapped_DisconnectEx(OverlappedObject *self, PyObject *const *arg if (!_PyLong_UnsignedLong_Converter(args[1], &flags)) { goto exit; } - return_value = _overlapped_Overlapped_DisconnectEx_impl(self, Socket, flags); + return_value = _overlapped_Overlapped_DisconnectEx_impl((OverlappedObject *)self, Socket, flags); exit: return return_value; @@ -944,7 +944,7 @@ _overlapped_Overlapped_TransmitFile_impl(OverlappedObject *self, DWORD count_per_send, DWORD flags); static PyObject * -_overlapped_Overlapped_TransmitFile(OverlappedObject *self, PyObject *const *args, Py_ssize_t nargs) +_overlapped_Overlapped_TransmitFile(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; HANDLE Socket; @@ -981,7 +981,7 @@ _overlapped_Overlapped_TransmitFile(OverlappedObject *self, PyObject *const *arg if (!_PyLong_UnsignedLong_Converter(args[6], &flags)) { goto exit; } - return_value = _overlapped_Overlapped_TransmitFile_impl(self, Socket, File, offset, offset_high, count_to_write, count_per_send, flags); + return_value = _overlapped_Overlapped_TransmitFile_impl((OverlappedObject *)self, Socket, File, offset, offset_high, count_to_write, count_per_send, flags); exit: return return_value; @@ -1001,7 +1001,7 @@ _overlapped_Overlapped_ConnectNamedPipe_impl(OverlappedObject *self, HANDLE Pipe); static PyObject * -_overlapped_Overlapped_ConnectNamedPipe(OverlappedObject *self, PyObject *arg) +_overlapped_Overlapped_ConnectNamedPipe(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; HANDLE 
Pipe; @@ -1010,7 +1010,7 @@ _overlapped_Overlapped_ConnectNamedPipe(OverlappedObject *self, PyObject *arg) if (!Pipe && PyErr_Occurred()) { goto exit; } - return_value = _overlapped_Overlapped_ConnectNamedPipe_impl(self, Pipe); + return_value = _overlapped_Overlapped_ConnectNamedPipe_impl((OverlappedObject *)self, Pipe); exit: return return_value; @@ -1030,7 +1030,7 @@ _overlapped_Overlapped_ConnectPipe_impl(OverlappedObject *self, const wchar_t *Address); static PyObject * -_overlapped_Overlapped_ConnectPipe(OverlappedObject *self, PyObject *arg) +_overlapped_Overlapped_ConnectPipe(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; const wchar_t *Address = NULL; @@ -1043,7 +1043,7 @@ _overlapped_Overlapped_ConnectPipe(OverlappedObject *self, PyObject *arg) if (Address == NULL) { goto exit; } - return_value = _overlapped_Overlapped_ConnectPipe_impl(self, Address); + return_value = _overlapped_Overlapped_ConnectPipe_impl((OverlappedObject *)self, Address); exit: /* Cleanup for Address */ @@ -1105,7 +1105,7 @@ _overlapped_Overlapped_WSASendTo_impl(OverlappedObject *self, HANDLE handle, PyObject *AddressObj); static PyObject * -_overlapped_Overlapped_WSASendTo(OverlappedObject *self, PyObject *const *args, Py_ssize_t nargs) +_overlapped_Overlapped_WSASendTo(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; HANDLE handle; @@ -1131,7 +1131,7 @@ _overlapped_Overlapped_WSASendTo(OverlappedObject *self, PyObject *const *args, goto exit; } AddressObj = args[3]; - return_value = _overlapped_Overlapped_WSASendTo_impl(self, handle, &bufobj, flags, AddressObj); + return_value = _overlapped_Overlapped_WSASendTo_impl((OverlappedObject *)self, handle, &bufobj, flags, AddressObj); exit: /* Cleanup for bufobj */ @@ -1157,7 +1157,7 @@ _overlapped_Overlapped_WSARecvFrom_impl(OverlappedObject *self, DWORD flags); static PyObject * -_overlapped_Overlapped_WSARecvFrom(OverlappedObject *self, PyObject *const *args, Py_ssize_t nargs) +_overlapped_Overlapped_WSARecvFrom(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; HANDLE handle; @@ -1181,7 +1181,7 @@ _overlapped_Overlapped_WSARecvFrom(OverlappedObject *self, PyObject *const *args goto exit; } skip_optional: - return_value = _overlapped_Overlapped_WSARecvFrom_impl(self, handle, size, flags); + return_value = _overlapped_Overlapped_WSARecvFrom_impl((OverlappedObject *)self, handle, size, flags); exit: return return_value; @@ -1202,7 +1202,7 @@ _overlapped_Overlapped_WSARecvFromInto_impl(OverlappedObject *self, DWORD size, DWORD flags); static PyObject * -_overlapped_Overlapped_WSARecvFromInto(OverlappedObject *self, PyObject *const *args, Py_ssize_t nargs) +_overlapped_Overlapped_WSARecvFromInto(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; HANDLE handle; @@ -1230,7 +1230,7 @@ _overlapped_Overlapped_WSARecvFromInto(OverlappedObject *self, PyObject *const * goto exit; } skip_optional: - return_value = _overlapped_Overlapped_WSARecvFromInto_impl(self, handle, &bufobj, size, flags); + return_value = _overlapped_Overlapped_WSARecvFromInto_impl((OverlappedObject *)self, handle, &bufobj, size, flags); exit: /* Cleanup for bufobj */ @@ -1240,4 +1240,4 @@ _overlapped_Overlapped_WSARecvFromInto(OverlappedObject *self, PyObject *const * return return_value; } -/*[clinic end generated code: output=14c4f87906f28dc5 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=d009cc9e53d9732a input=a9049054013a1b77]*/ diff --git 
a/Modules/clinic/posixmodule.c.h b/Modules/clinic/posixmodule.c.h index 554299b8598299..abeb9c3e3e12b1 100644 --- a/Modules/clinic/posixmodule.c.h +++ b/Modules/clinic/posixmodule.c.h @@ -309,7 +309,7 @@ os_access(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *k return return_value; } -#if defined(HAVE_TTYNAME) +#if defined(HAVE_TTYNAME_R) PyDoc_STRVAR(os_ttyname__doc__, "ttyname($module, fd, /)\n" @@ -342,7 +342,7 @@ os_ttyname(PyObject *module, PyObject *arg) return return_value; } -#endif /* defined(HAVE_TTYNAME) */ +#endif /* defined(HAVE_TTYNAME_R) */ #if defined(HAVE_CTERMID) @@ -7577,6 +7577,62 @@ os_read(PyObject *module, PyObject *const *args, Py_ssize_t nargs) return return_value; } +PyDoc_STRVAR(os_readinto__doc__, +"readinto($module, fd, buffer, /)\n" +"--\n" +"\n" +"Read into a buffer object from a file descriptor.\n" +"\n" +"The buffer should be mutable and bytes-like. On success, returns the number of\n" +"bytes read. Less bytes may be read than the size of the buffer. The underlying\n" +"system call will be retried when interrupted by a signal, unless the signal\n" +"handler raises an exception. Other errors will not be retried and an error will\n" +"be raised.\n" +"\n" +"Returns 0 if *fd* is at end of file or if the provided *buffer* has length 0\n" +"(which can be used to check for errors without reading data). Never returns\n" +"negative."); + +#define OS_READINTO_METHODDEF \ + {"readinto", _PyCFunction_CAST(os_readinto), METH_FASTCALL, os_readinto__doc__}, + +static Py_ssize_t +os_readinto_impl(PyObject *module, int fd, Py_buffer *buffer); + +static PyObject * +os_readinto(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + int fd; + Py_buffer buffer = {NULL, NULL}; + Py_ssize_t _return_value; + + if (!_PyArg_CheckPositional("readinto", nargs, 2, 2)) { + goto exit; + } + fd = PyLong_AsInt(args[0]); + if (fd == -1 && PyErr_Occurred()) { + goto exit; + } + if (PyObject_GetBuffer(args[1], &buffer, PyBUF_WRITABLE) < 0) { + _PyArg_BadArgument("readinto", "argument 2", "read-write bytes-like object", args[1]); + goto exit; + } + _return_value = os_readinto_impl(module, fd, &buffer); + if ((_return_value == -1) && PyErr_Occurred()) { + goto exit; + } + return_value = PyLong_FromSsize_t(_return_value); + +exit: + /* Cleanup for buffer */ + if (buffer.obj) { + PyBuffer_Release(&buffer); + } + + return return_value; +} + #if defined(HAVE_READV) PyDoc_STRVAR(os_readv__doc__, @@ -11662,7 +11718,7 @@ static int os_DirEntry_is_symlink_impl(DirEntry *self, PyTypeObject *defining_class); static PyObject * -os_DirEntry_is_symlink(DirEntry *self, PyTypeObject *defining_class, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +os_DirEntry_is_symlink(PyObject *self, PyTypeObject *defining_class, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; int _return_value; @@ -11671,7 +11727,7 @@ os_DirEntry_is_symlink(DirEntry *self, PyTypeObject *defining_class, PyObject *c PyErr_SetString(PyExc_TypeError, "is_symlink() takes no arguments"); goto exit; } - _return_value = os_DirEntry_is_symlink_impl(self, defining_class); + _return_value = os_DirEntry_is_symlink_impl((DirEntry *)self, defining_class); if ((_return_value == -1) && PyErr_Occurred()) { goto exit; } @@ -11694,12 +11750,12 @@ static int os_DirEntry_is_junction_impl(DirEntry *self); static PyObject * -os_DirEntry_is_junction(DirEntry *self, PyObject *Py_UNUSED(ignored)) +os_DirEntry_is_junction(PyObject *self, PyObject 
*Py_UNUSED(ignored)) { PyObject *return_value = NULL; int _return_value; - _return_value = os_DirEntry_is_junction_impl(self); + _return_value = os_DirEntry_is_junction_impl((DirEntry *)self); if ((_return_value == -1) && PyErr_Occurred()) { goto exit; } @@ -11723,7 +11779,7 @@ os_DirEntry_stat_impl(DirEntry *self, PyTypeObject *defining_class, int follow_symlinks); static PyObject * -os_DirEntry_stat(DirEntry *self, PyTypeObject *defining_class, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +os_DirEntry_stat(PyObject *self, PyTypeObject *defining_class, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -11768,7 +11824,7 @@ os_DirEntry_stat(DirEntry *self, PyTypeObject *defining_class, PyObject *const * goto exit; } skip_optional_kwonly: - return_value = os_DirEntry_stat_impl(self, defining_class, follow_symlinks); + return_value = os_DirEntry_stat_impl((DirEntry *)self, defining_class, follow_symlinks); exit: return return_value; @@ -11788,7 +11844,7 @@ os_DirEntry_is_dir_impl(DirEntry *self, PyTypeObject *defining_class, int follow_symlinks); static PyObject * -os_DirEntry_is_dir(DirEntry *self, PyTypeObject *defining_class, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +os_DirEntry_is_dir(PyObject *self, PyTypeObject *defining_class, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -11834,7 +11890,7 @@ os_DirEntry_is_dir(DirEntry *self, PyTypeObject *defining_class, PyObject *const goto exit; } skip_optional_kwonly: - _return_value = os_DirEntry_is_dir_impl(self, defining_class, follow_symlinks); + _return_value = os_DirEntry_is_dir_impl((DirEntry *)self, defining_class, follow_symlinks); if ((_return_value == -1) && PyErr_Occurred()) { goto exit; } @@ -11858,7 +11914,7 @@ os_DirEntry_is_file_impl(DirEntry *self, PyTypeObject *defining_class, int follow_symlinks); static PyObject * -os_DirEntry_is_file(DirEntry *self, PyTypeObject *defining_class, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +os_DirEntry_is_file(PyObject *self, PyTypeObject *defining_class, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -11904,7 +11960,7 @@ os_DirEntry_is_file(DirEntry *self, PyTypeObject *defining_class, PyObject *cons goto exit; } skip_optional_kwonly: - _return_value = os_DirEntry_is_file_impl(self, defining_class, follow_symlinks); + _return_value = os_DirEntry_is_file_impl((DirEntry *)self, defining_class, follow_symlinks); if ((_return_value == -1) && PyErr_Occurred()) { goto exit; } @@ -11927,9 +11983,9 @@ static PyObject * os_DirEntry_inode_impl(DirEntry *self); static PyObject * -os_DirEntry_inode(DirEntry *self, PyObject *Py_UNUSED(ignored)) +os_DirEntry_inode(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return os_DirEntry_inode_impl(self); + return os_DirEntry_inode_impl((DirEntry *)self); } PyDoc_STRVAR(os_DirEntry___fspath____doc__, @@ -11945,9 +12001,9 @@ static PyObject * os_DirEntry___fspath___impl(DirEntry *self); static PyObject * -os_DirEntry___fspath__(DirEntry *self, PyObject *Py_UNUSED(ignored)) +os_DirEntry___fspath__(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return os_DirEntry___fspath___impl(self); + return os_DirEntry___fspath___impl((DirEntry *)self); } PyDoc_STRVAR(os_scandir__doc__, @@ -13140,4 
+13196,4 @@ os__emscripten_debugger(PyObject *module, PyObject *Py_UNUSED(ignored)) #ifndef OS__EMSCRIPTEN_DEBUGGER_METHODDEF #define OS__EMSCRIPTEN_DEBUGGER_METHODDEF #endif /* !defined(OS__EMSCRIPTEN_DEBUGGER_METHODDEF) */ -/*[clinic end generated code: output=9c2ca1dbf986c62c input=a9049054013a1b77]*/ +/*[clinic end generated code: output=8318c26fc2cd236c input=a9049054013a1b77]*/ diff --git a/Modules/clinic/pyexpat.c.h b/Modules/clinic/pyexpat.c.h index e57aa8a07d78c7..9eba59731c3fba 100644 --- a/Modules/clinic/pyexpat.c.h +++ b/Modules/clinic/pyexpat.c.h @@ -22,7 +22,7 @@ pyexpat_xmlparser_SetReparseDeferralEnabled_impl(xmlparseobject *self, int enabled); static PyObject * -pyexpat_xmlparser_SetReparseDeferralEnabled(xmlparseobject *self, PyObject *arg) +pyexpat_xmlparser_SetReparseDeferralEnabled(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; int enabled; @@ -31,7 +31,7 @@ pyexpat_xmlparser_SetReparseDeferralEnabled(xmlparseobject *self, PyObject *arg) if (enabled < 0) { goto exit; } - return_value = pyexpat_xmlparser_SetReparseDeferralEnabled_impl(self, enabled); + return_value = pyexpat_xmlparser_SetReparseDeferralEnabled_impl((xmlparseobject *)self, enabled); exit: return return_value; @@ -50,9 +50,9 @@ static PyObject * pyexpat_xmlparser_GetReparseDeferralEnabled_impl(xmlparseobject *self); static PyObject * -pyexpat_xmlparser_GetReparseDeferralEnabled(xmlparseobject *self, PyObject *Py_UNUSED(ignored)) +pyexpat_xmlparser_GetReparseDeferralEnabled(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return pyexpat_xmlparser_GetReparseDeferralEnabled_impl(self); + return pyexpat_xmlparser_GetReparseDeferralEnabled_impl((xmlparseobject *)self); } PyDoc_STRVAR(pyexpat_xmlparser_Parse__doc__, @@ -71,7 +71,7 @@ pyexpat_xmlparser_Parse_impl(xmlparseobject *self, PyTypeObject *cls, PyObject *data, int isfinal); static PyObject * -pyexpat_xmlparser_Parse(xmlparseobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +pyexpat_xmlparser_Parse(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -105,7 +105,7 @@ pyexpat_xmlparser_Parse(xmlparseobject *self, PyTypeObject *cls, PyObject *const goto exit; } skip_optional_posonly: - return_value = pyexpat_xmlparser_Parse_impl(self, cls, data, isfinal); + return_value = pyexpat_xmlparser_Parse_impl((xmlparseobject *)self, cls, data, isfinal); exit: return return_value; @@ -125,7 +125,7 @@ pyexpat_xmlparser_ParseFile_impl(xmlparseobject *self, PyTypeObject *cls, PyObject *file); static PyObject * -pyexpat_xmlparser_ParseFile(xmlparseobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +pyexpat_xmlparser_ParseFile(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -150,7 +150,7 @@ pyexpat_xmlparser_ParseFile(xmlparseobject *self, PyTypeObject *cls, PyObject *c goto exit; } file = args[0]; - return_value = pyexpat_xmlparser_ParseFile_impl(self, cls, file); + return_value = pyexpat_xmlparser_ParseFile_impl((xmlparseobject *)self, cls, file); exit: return return_value; @@ -169,7 +169,7 @@ static PyObject * pyexpat_xmlparser_SetBase_impl(xmlparseobject *self, const char *base); static PyObject * -pyexpat_xmlparser_SetBase(xmlparseobject *self, PyObject *arg) 
+pyexpat_xmlparser_SetBase(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; const char *base; @@ -187,7 +187,7 @@ pyexpat_xmlparser_SetBase(xmlparseobject *self, PyObject *arg) PyErr_SetString(PyExc_ValueError, "embedded null character"); goto exit; } - return_value = pyexpat_xmlparser_SetBase_impl(self, base); + return_value = pyexpat_xmlparser_SetBase_impl((xmlparseobject *)self, base); exit: return return_value; @@ -206,9 +206,9 @@ static PyObject * pyexpat_xmlparser_GetBase_impl(xmlparseobject *self); static PyObject * -pyexpat_xmlparser_GetBase(xmlparseobject *self, PyObject *Py_UNUSED(ignored)) +pyexpat_xmlparser_GetBase(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return pyexpat_xmlparser_GetBase_impl(self); + return pyexpat_xmlparser_GetBase_impl((xmlparseobject *)self); } PyDoc_STRVAR(pyexpat_xmlparser_GetInputContext__doc__, @@ -227,9 +227,9 @@ static PyObject * pyexpat_xmlparser_GetInputContext_impl(xmlparseobject *self); static PyObject * -pyexpat_xmlparser_GetInputContext(xmlparseobject *self, PyObject *Py_UNUSED(ignored)) +pyexpat_xmlparser_GetInputContext(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return pyexpat_xmlparser_GetInputContext_impl(self); + return pyexpat_xmlparser_GetInputContext_impl((xmlparseobject *)self); } PyDoc_STRVAR(pyexpat_xmlparser_ExternalEntityParserCreate__doc__, @@ -249,7 +249,7 @@ pyexpat_xmlparser_ExternalEntityParserCreate_impl(xmlparseobject *self, const char *encoding); static PyObject * -pyexpat_xmlparser_ExternalEntityParserCreate(xmlparseobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +pyexpat_xmlparser_ExternalEntityParserCreate(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -309,7 +309,7 @@ pyexpat_xmlparser_ExternalEntityParserCreate(xmlparseobject *self, PyTypeObject goto exit; } skip_optional_posonly: - return_value = pyexpat_xmlparser_ExternalEntityParserCreate_impl(self, cls, context, encoding); + return_value = pyexpat_xmlparser_ExternalEntityParserCreate_impl((xmlparseobject *)self, cls, context, encoding); exit: return return_value; @@ -333,7 +333,7 @@ static PyObject * pyexpat_xmlparser_SetParamEntityParsing_impl(xmlparseobject *self, int flag); static PyObject * -pyexpat_xmlparser_SetParamEntityParsing(xmlparseobject *self, PyObject *arg) +pyexpat_xmlparser_SetParamEntityParsing(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; int flag; @@ -342,7 +342,7 @@ pyexpat_xmlparser_SetParamEntityParsing(xmlparseobject *self, PyObject *arg) if (flag == -1 && PyErr_Occurred()) { goto exit; } - return_value = pyexpat_xmlparser_SetParamEntityParsing_impl(self, flag); + return_value = pyexpat_xmlparser_SetParamEntityParsing_impl((xmlparseobject *)self, flag); exit: return return_value; @@ -368,7 +368,7 @@ pyexpat_xmlparser_UseForeignDTD_impl(xmlparseobject *self, PyTypeObject *cls, int flag); static PyObject * -pyexpat_xmlparser_UseForeignDTD(xmlparseobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +pyexpat_xmlparser_UseForeignDTD(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -400,7 +400,7 @@ pyexpat_xmlparser_UseForeignDTD(xmlparseobject *self, PyTypeObject *cls, PyObjec goto exit; } skip_optional_posonly: - return_value = 
pyexpat_xmlparser_UseForeignDTD_impl(self, cls, flag); + return_value = pyexpat_xmlparser_UseForeignDTD_impl((xmlparseobject *)self, cls, flag); exit: return return_value; @@ -550,4 +550,4 @@ pyexpat_ErrorString(PyObject *module, PyObject *arg) #ifndef PYEXPAT_XMLPARSER_USEFOREIGNDTD_METHODDEF #define PYEXPAT_XMLPARSER_USEFOREIGNDTD_METHODDEF #endif /* !defined(PYEXPAT_XMLPARSER_USEFOREIGNDTD_METHODDEF) */ -/*[clinic end generated code: output=63be65cb1823b5f8 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=7ee30ae5b666d0a8 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/selectmodule.c.h b/Modules/clinic/selectmodule.c.h index 806a888d6b8cd9..d8bdd6f95f3d29 100644 --- a/Modules/clinic/selectmodule.c.h +++ b/Modules/clinic/selectmodule.c.h @@ -91,7 +91,7 @@ static PyObject * select_poll_register_impl(pollObject *self, int fd, unsigned short eventmask); static PyObject * -select_poll_register(pollObject *self, PyObject *const *args, Py_ssize_t nargs) +select_poll_register(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; int fd; @@ -112,7 +112,7 @@ select_poll_register(pollObject *self, PyObject *const *args, Py_ssize_t nargs) } skip_optional: Py_BEGIN_CRITICAL_SECTION(self); - return_value = select_poll_register_impl(self, fd, eventmask); + return_value = select_poll_register_impl((pollObject *)self, fd, eventmask); Py_END_CRITICAL_SECTION(); exit: @@ -142,7 +142,7 @@ static PyObject * select_poll_modify_impl(pollObject *self, int fd, unsigned short eventmask); static PyObject * -select_poll_modify(pollObject *self, PyObject *const *args, Py_ssize_t nargs) +select_poll_modify(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; int fd; @@ -159,7 +159,7 @@ select_poll_modify(pollObject *self, PyObject *const *args, Py_ssize_t nargs) goto exit; } Py_BEGIN_CRITICAL_SECTION(self); - return_value = select_poll_modify_impl(self, fd, eventmask); + return_value = select_poll_modify_impl((pollObject *)self, fd, eventmask); Py_END_CRITICAL_SECTION(); exit: @@ -183,7 +183,7 @@ static PyObject * select_poll_unregister_impl(pollObject *self, int fd); static PyObject * -select_poll_unregister(pollObject *self, PyObject *arg) +select_poll_unregister(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; int fd; @@ -193,7 +193,7 @@ select_poll_unregister(pollObject *self, PyObject *arg) goto exit; } Py_BEGIN_CRITICAL_SECTION(self); - return_value = select_poll_unregister_impl(self, fd); + return_value = select_poll_unregister_impl((pollObject *)self, fd); Py_END_CRITICAL_SECTION(); exit: @@ -224,7 +224,7 @@ static PyObject * select_poll_poll_impl(pollObject *self, PyObject *timeout_obj); static PyObject * -select_poll_poll(pollObject *self, PyObject *const *args, Py_ssize_t nargs) +select_poll_poll(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *timeout_obj = Py_None; @@ -238,7 +238,7 @@ select_poll_poll(pollObject *self, PyObject *const *args, Py_ssize_t nargs) timeout_obj = args[0]; skip_optional: Py_BEGIN_CRITICAL_SECTION(self); - return_value = select_poll_poll_impl(self, timeout_obj); + return_value = select_poll_poll_impl((pollObject *)self, timeout_obj); Py_END_CRITICAL_SECTION(); exit: @@ -270,7 +270,7 @@ select_devpoll_register_impl(devpollObject *self, int fd, unsigned short eventmask); static PyObject * -select_devpoll_register(devpollObject *self, PyObject *const *args, Py_ssize_t nargs) +select_devpoll_register(PyObject *self, PyObject 
*const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; int fd; @@ -291,7 +291,7 @@ select_devpoll_register(devpollObject *self, PyObject *const *args, Py_ssize_t n } skip_optional: Py_BEGIN_CRITICAL_SECTION(self); - return_value = select_devpoll_register_impl(self, fd, eventmask); + return_value = select_devpoll_register_impl((devpollObject *)self, fd, eventmask); Py_END_CRITICAL_SECTION(); exit: @@ -323,7 +323,7 @@ select_devpoll_modify_impl(devpollObject *self, int fd, unsigned short eventmask); static PyObject * -select_devpoll_modify(devpollObject *self, PyObject *const *args, Py_ssize_t nargs) +select_devpoll_modify(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; int fd; @@ -344,7 +344,7 @@ select_devpoll_modify(devpollObject *self, PyObject *const *args, Py_ssize_t nar } skip_optional: Py_BEGIN_CRITICAL_SECTION(self); - return_value = select_devpoll_modify_impl(self, fd, eventmask); + return_value = select_devpoll_modify_impl((devpollObject *)self, fd, eventmask); Py_END_CRITICAL_SECTION(); exit: @@ -368,7 +368,7 @@ static PyObject * select_devpoll_unregister_impl(devpollObject *self, int fd); static PyObject * -select_devpoll_unregister(devpollObject *self, PyObject *arg) +select_devpoll_unregister(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; int fd; @@ -378,7 +378,7 @@ select_devpoll_unregister(devpollObject *self, PyObject *arg) goto exit; } Py_BEGIN_CRITICAL_SECTION(self); - return_value = select_devpoll_unregister_impl(self, fd); + return_value = select_devpoll_unregister_impl((devpollObject *)self, fd); Py_END_CRITICAL_SECTION(); exit: @@ -409,7 +409,7 @@ static PyObject * select_devpoll_poll_impl(devpollObject *self, PyObject *timeout_obj); static PyObject * -select_devpoll_poll(devpollObject *self, PyObject *const *args, Py_ssize_t nargs) +select_devpoll_poll(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *timeout_obj = Py_None; @@ -423,7 +423,7 @@ select_devpoll_poll(devpollObject *self, PyObject *const *args, Py_ssize_t nargs timeout_obj = args[0]; skip_optional: Py_BEGIN_CRITICAL_SECTION(self); - return_value = select_devpoll_poll_impl(self, timeout_obj); + return_value = select_devpoll_poll_impl((devpollObject *)self, timeout_obj); Py_END_CRITICAL_SECTION(); exit: @@ -449,12 +449,12 @@ static PyObject * select_devpoll_close_impl(devpollObject *self); static PyObject * -select_devpoll_close(devpollObject *self, PyObject *Py_UNUSED(ignored)) +select_devpoll_close(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = select_devpoll_close_impl(self); + return_value = select_devpoll_close_impl((devpollObject *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -477,12 +477,12 @@ static PyObject * select_devpoll_fileno_impl(devpollObject *self); static PyObject * -select_devpoll_fileno(devpollObject *self, PyObject *Py_UNUSED(ignored)) +select_devpoll_fileno(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = select_devpoll_fileno_impl(self); + return_value = select_devpoll_fileno_impl((devpollObject *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -643,12 +643,12 @@ static PyObject * select_epoll_close_impl(pyEpoll_Object *self); static PyObject * -select_epoll_close(pyEpoll_Object *self, PyObject *Py_UNUSED(ignored)) +select_epoll_close(PyObject *self, PyObject *Py_UNUSED(ignored)) { 
PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = select_epoll_close_impl(self); + return_value = select_epoll_close_impl((pyEpoll_Object *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -671,9 +671,9 @@ static PyObject * select_epoll_fileno_impl(pyEpoll_Object *self); static PyObject * -select_epoll_fileno(pyEpoll_Object *self, PyObject *Py_UNUSED(ignored)) +select_epoll_fileno(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return select_epoll_fileno_impl(self); + return select_epoll_fileno_impl((pyEpoll_Object *)self); } #endif /* defined(HAVE_EPOLL) */ @@ -734,7 +734,7 @@ select_epoll_register_impl(pyEpoll_Object *self, int fd, unsigned int eventmask); static PyObject * -select_epoll_register(pyEpoll_Object *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +select_epoll_register(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -784,7 +784,7 @@ select_epoll_register(pyEpoll_Object *self, PyObject *const *args, Py_ssize_t na goto exit; } skip_optional_pos: - return_value = select_epoll_register_impl(self, fd, eventmask); + return_value = select_epoll_register_impl((pyEpoll_Object *)self, fd, eventmask); exit: return return_value; @@ -813,7 +813,7 @@ select_epoll_modify_impl(pyEpoll_Object *self, int fd, unsigned int eventmask); static PyObject * -select_epoll_modify(pyEpoll_Object *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +select_epoll_modify(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -858,7 +858,7 @@ select_epoll_modify(pyEpoll_Object *self, PyObject *const *args, Py_ssize_t narg if (eventmask == (unsigned int)-1 && PyErr_Occurred()) { goto exit; } - return_value = select_epoll_modify_impl(self, fd, eventmask); + return_value = select_epoll_modify_impl((pyEpoll_Object *)self, fd, eventmask); exit: return return_value; @@ -884,7 +884,7 @@ static PyObject * select_epoll_unregister_impl(pyEpoll_Object *self, int fd); static PyObject * -select_epoll_unregister(pyEpoll_Object *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +select_epoll_unregister(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -924,7 +924,7 @@ select_epoll_unregister(pyEpoll_Object *self, PyObject *const *args, Py_ssize_t if (fd < 0) { goto exit; } - return_value = select_epoll_unregister_impl(self, fd); + return_value = select_epoll_unregister_impl((pyEpoll_Object *)self, fd); exit: return return_value; @@ -957,7 +957,7 @@ select_epoll_poll_impl(pyEpoll_Object *self, PyObject *timeout_obj, int maxevents); static PyObject * -select_epoll_poll(pyEpoll_Object *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +select_epoll_poll(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -1009,7 +1009,7 @@ select_epoll_poll(pyEpoll_Object *self, PyObject *const *args, Py_ssize_t nargs, goto exit; } skip_optional_pos: - return_value = select_epoll_poll_impl(self, timeout_obj, maxevents); + return_value = select_epoll_poll_impl((pyEpoll_Object *)self, timeout_obj, maxevents); exit: return return_value; @@ -1031,9 +1031,9 @@ 
static PyObject * select_epoll___enter___impl(pyEpoll_Object *self); static PyObject * -select_epoll___enter__(pyEpoll_Object *self, PyObject *Py_UNUSED(ignored)) +select_epoll___enter__(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return select_epoll___enter___impl(self); + return select_epoll___enter___impl((pyEpoll_Object *)self); } #endif /* defined(HAVE_EPOLL) */ @@ -1053,7 +1053,7 @@ select_epoll___exit___impl(pyEpoll_Object *self, PyObject *exc_type, PyObject *exc_value, PyObject *exc_tb); static PyObject * -select_epoll___exit__(pyEpoll_Object *self, PyObject *const *args, Py_ssize_t nargs) +select_epoll___exit__(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *exc_type = Py_None; @@ -1076,7 +1076,7 @@ select_epoll___exit__(pyEpoll_Object *self, PyObject *const *args, Py_ssize_t na } exc_tb = args[2]; skip_optional: - return_value = select_epoll___exit___impl(self, exc_type, exc_value, exc_tb); + return_value = select_epoll___exit___impl((pyEpoll_Object *)self, exc_type, exc_value, exc_tb); exit: return return_value; @@ -1146,12 +1146,12 @@ static PyObject * select_kqueue_close_impl(kqueue_queue_Object *self); static PyObject * -select_kqueue_close(kqueue_queue_Object *self, PyObject *Py_UNUSED(ignored)) +select_kqueue_close(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = select_kqueue_close_impl(self); + return_value = select_kqueue_close_impl((kqueue_queue_Object *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1174,9 +1174,9 @@ static PyObject * select_kqueue_fileno_impl(kqueue_queue_Object *self); static PyObject * -select_kqueue_fileno(kqueue_queue_Object *self, PyObject *Py_UNUSED(ignored)) +select_kqueue_fileno(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return select_kqueue_fileno_impl(self); + return select_kqueue_fileno_impl((kqueue_queue_Object *)self); } #endif /* defined(HAVE_KQUEUE) */ @@ -1238,7 +1238,7 @@ select_kqueue_control_impl(kqueue_queue_Object *self, PyObject *changelist, int maxevents, PyObject *otimeout); static PyObject * -select_kqueue_control(kqueue_queue_Object *self, PyObject *const *args, Py_ssize_t nargs) +select_kqueue_control(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *changelist; @@ -1258,7 +1258,7 @@ select_kqueue_control(kqueue_queue_Object *self, PyObject *const *args, Py_ssize } otimeout = args[2]; skip_optional: - return_value = select_kqueue_control_impl(self, changelist, maxevents, otimeout); + return_value = select_kqueue_control_impl((kqueue_queue_Object *)self, changelist, maxevents, otimeout); exit: return return_value; @@ -1365,4 +1365,4 @@ select_kqueue_control(kqueue_queue_Object *self, PyObject *const *args, Py_ssize #ifndef SELECT_KQUEUE_CONTROL_METHODDEF #define SELECT_KQUEUE_CONTROL_METHODDEF #endif /* !defined(SELECT_KQUEUE_CONTROL_METHODDEF) */ -/*[clinic end generated code: output=78b4e67f7d401b5e input=a9049054013a1b77]*/ +/*[clinic end generated code: output=c18fd93efc5f4dce input=a9049054013a1b77]*/ diff --git a/Modules/clinic/sha1module.c.h b/Modules/clinic/sha1module.c.h index 6af77ba64ecce6..ddd8e66a41d7ff 100644 --- a/Modules/clinic/sha1module.c.h +++ b/Modules/clinic/sha1module.c.h @@ -21,13 +21,13 @@ static PyObject * SHA1Type_copy_impl(SHA1object *self, PyTypeObject *cls); static PyObject * -SHA1Type_copy(SHA1object *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) 
+SHA1Type_copy(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "copy() takes no arguments"); return NULL; } - return SHA1Type_copy_impl(self, cls); + return SHA1Type_copy_impl((SHA1object *)self, cls); } PyDoc_STRVAR(SHA1Type_digest__doc__, @@ -43,9 +43,9 @@ static PyObject * SHA1Type_digest_impl(SHA1object *self); static PyObject * -SHA1Type_digest(SHA1object *self, PyObject *Py_UNUSED(ignored)) +SHA1Type_digest(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return SHA1Type_digest_impl(self); + return SHA1Type_digest_impl((SHA1object *)self); } PyDoc_STRVAR(SHA1Type_hexdigest__doc__, @@ -61,9 +61,9 @@ static PyObject * SHA1Type_hexdigest_impl(SHA1object *self); static PyObject * -SHA1Type_hexdigest(SHA1object *self, PyObject *Py_UNUSED(ignored)) +SHA1Type_hexdigest(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return SHA1Type_hexdigest_impl(self); + return SHA1Type_hexdigest_impl((SHA1object *)self); } PyDoc_STRVAR(SHA1Type_update__doc__, @@ -149,4 +149,4 @@ _sha1_sha1(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject * exit: return return_value; } -/*[clinic end generated code: output=917e2789f1f5ebf9 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=ad6f3788a6e7ff6f input=a9049054013a1b77]*/ diff --git a/Modules/clinic/sha2module.c.h b/Modules/clinic/sha2module.c.h index fec655a0dfaa58..d86f5510d752e8 100644 --- a/Modules/clinic/sha2module.c.h +++ b/Modules/clinic/sha2module.c.h @@ -21,13 +21,13 @@ static PyObject * SHA256Type_copy_impl(SHA256object *self, PyTypeObject *cls); static PyObject * -SHA256Type_copy(SHA256object *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +SHA256Type_copy(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "copy() takes no arguments"); return NULL; } - return SHA256Type_copy_impl(self, cls); + return SHA256Type_copy_impl((SHA256object *)self, cls); } PyDoc_STRVAR(SHA512Type_copy__doc__, @@ -43,13 +43,13 @@ static PyObject * SHA512Type_copy_impl(SHA512object *self, PyTypeObject *cls); static PyObject * -SHA512Type_copy(SHA512object *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +SHA512Type_copy(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "copy() takes no arguments"); return NULL; } - return SHA512Type_copy_impl(self, cls); + return SHA512Type_copy_impl((SHA512object *)self, cls); } PyDoc_STRVAR(SHA256Type_digest__doc__, @@ -65,9 +65,9 @@ static PyObject * SHA256Type_digest_impl(SHA256object *self); static PyObject * -SHA256Type_digest(SHA256object *self, PyObject *Py_UNUSED(ignored)) +SHA256Type_digest(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return SHA256Type_digest_impl(self); + return SHA256Type_digest_impl((SHA256object *)self); } PyDoc_STRVAR(SHA512Type_digest__doc__, @@ -83,9 +83,9 @@ static PyObject * SHA512Type_digest_impl(SHA512object *self); static PyObject * -SHA512Type_digest(SHA512object *self, PyObject *Py_UNUSED(ignored)) +SHA512Type_digest(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return SHA512Type_digest_impl(self); + return SHA512Type_digest_impl((SHA512object *)self); } 
PyDoc_STRVAR(SHA256Type_hexdigest__doc__, @@ -101,9 +101,9 @@ static PyObject * SHA256Type_hexdigest_impl(SHA256object *self); static PyObject * -SHA256Type_hexdigest(SHA256object *self, PyObject *Py_UNUSED(ignored)) +SHA256Type_hexdigest(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return SHA256Type_hexdigest_impl(self); + return SHA256Type_hexdigest_impl((SHA256object *)self); } PyDoc_STRVAR(SHA512Type_hexdigest__doc__, @@ -119,9 +119,9 @@ static PyObject * SHA512Type_hexdigest_impl(SHA512object *self); static PyObject * -SHA512Type_hexdigest(SHA512object *self, PyObject *Py_UNUSED(ignored)) +SHA512Type_hexdigest(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return SHA512Type_hexdigest_impl(self); + return SHA512Type_hexdigest_impl((SHA512object *)self); } PyDoc_STRVAR(SHA256Type_update__doc__, @@ -441,4 +441,4 @@ _sha2_sha384(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject exit: return return_value; } -/*[clinic end generated code: output=602a6939b8ec0927 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=1d7fec114eb6b6e3 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/sha3module.c.h b/Modules/clinic/sha3module.c.h index d9f4b66f81a038..729e216ce023cf 100644 --- a/Modules/clinic/sha3module.c.h +++ b/Modules/clinic/sha3module.c.h @@ -92,9 +92,9 @@ static PyObject * _sha3_sha3_224_copy_impl(SHA3object *self); static PyObject * -_sha3_sha3_224_copy(SHA3object *self, PyObject *Py_UNUSED(ignored)) +_sha3_sha3_224_copy(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _sha3_sha3_224_copy_impl(self); + return _sha3_sha3_224_copy_impl((SHA3object *)self); } PyDoc_STRVAR(_sha3_sha3_224_digest__doc__, @@ -110,9 +110,9 @@ static PyObject * _sha3_sha3_224_digest_impl(SHA3object *self); static PyObject * -_sha3_sha3_224_digest(SHA3object *self, PyObject *Py_UNUSED(ignored)) +_sha3_sha3_224_digest(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _sha3_sha3_224_digest_impl(self); + return _sha3_sha3_224_digest_impl((SHA3object *)self); } PyDoc_STRVAR(_sha3_sha3_224_hexdigest__doc__, @@ -128,9 +128,9 @@ static PyObject * _sha3_sha3_224_hexdigest_impl(SHA3object *self); static PyObject * -_sha3_sha3_224_hexdigest(SHA3object *self, PyObject *Py_UNUSED(ignored)) +_sha3_sha3_224_hexdigest(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _sha3_sha3_224_hexdigest_impl(self); + return _sha3_sha3_224_hexdigest_impl((SHA3object *)self); } PyDoc_STRVAR(_sha3_sha3_224_update__doc__, @@ -155,7 +155,7 @@ static PyObject * _sha3_shake_128_digest_impl(SHA3object *self, unsigned long length); static PyObject * -_sha3_shake_128_digest(SHA3object *self, PyObject *arg) +_sha3_shake_128_digest(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; unsigned long length; @@ -163,7 +163,7 @@ _sha3_shake_128_digest(SHA3object *self, PyObject *arg) if (!_PyLong_UnsignedLong_Converter(arg, &length)) { goto exit; } - return_value = _sha3_shake_128_digest_impl(self, length); + return_value = _sha3_shake_128_digest_impl((SHA3object *)self, length); exit: return return_value; @@ -182,7 +182,7 @@ static PyObject * _sha3_shake_128_hexdigest_impl(SHA3object *self, unsigned long length); static PyObject * -_sha3_shake_128_hexdigest(SHA3object *self, PyObject *arg) +_sha3_shake_128_hexdigest(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; unsigned long length; @@ -190,9 +190,9 @@ _sha3_shake_128_hexdigest(SHA3object *self, PyObject *arg) if (!_PyLong_UnsignedLong_Converter(arg, &length)) { goto exit; } - return_value = 
_sha3_shake_128_hexdigest_impl(self, length); + return_value = _sha3_shake_128_hexdigest_impl((SHA3object *)self, length); exit: return return_value; } -/*[clinic end generated code: output=5c644eb0ed42b993 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=21da06d9570969d8 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/socketmodule.c.h b/Modules/clinic/socketmodule.c.h index 2152f288a9722f..dc62c4290d3e3b 100644 --- a/Modules/clinic/socketmodule.c.h +++ b/Modules/clinic/socketmodule.c.h @@ -23,9 +23,9 @@ static PyObject * _socket_socket_close_impl(PySocketSockObject *s); static PyObject * -_socket_socket_close(PySocketSockObject *s, PyObject *Py_UNUSED(ignored)) +_socket_socket_close(PyObject *s, PyObject *Py_UNUSED(ignored)) { - return _socket_socket_close_impl(s); + return _socket_socket_close_impl((PySocketSockObject *)s); } static int @@ -126,7 +126,7 @@ static PyObject * _socket_socket_ntohs_impl(PySocketSockObject *self, int x); static PyObject * -_socket_socket_ntohs(PySocketSockObject *self, PyObject *arg) +_socket_socket_ntohs(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; int x; @@ -135,7 +135,7 @@ _socket_socket_ntohs(PySocketSockObject *self, PyObject *arg) if (x == -1 && PyErr_Occurred()) { goto exit; } - return_value = _socket_socket_ntohs_impl(self, x); + return_value = _socket_socket_ntohs_impl((PySocketSockObject *)self, x); exit: return return_value; @@ -154,7 +154,7 @@ static PyObject * _socket_socket_htons_impl(PySocketSockObject *self, int x); static PyObject * -_socket_socket_htons(PySocketSockObject *self, PyObject *arg) +_socket_socket_htons(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; int x; @@ -163,7 +163,7 @@ _socket_socket_htons(PySocketSockObject *self, PyObject *arg) if (x == -1 && PyErr_Occurred()) { goto exit; } - return_value = _socket_socket_htons_impl(self, x); + return_value = _socket_socket_htons_impl((PySocketSockObject *)self, x); exit: return return_value; @@ -182,7 +182,7 @@ static PyObject * _socket_socket_inet_aton_impl(PySocketSockObject *self, const char *ip_addr); static PyObject * -_socket_socket_inet_aton(PySocketSockObject *self, PyObject *arg) +_socket_socket_inet_aton(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; const char *ip_addr; @@ -200,7 +200,7 @@ _socket_socket_inet_aton(PySocketSockObject *self, PyObject *arg) PyErr_SetString(PyExc_ValueError, "embedded null character"); goto exit; } - return_value = _socket_socket_inet_aton_impl(self, ip_addr); + return_value = _socket_socket_inet_aton_impl((PySocketSockObject *)self, ip_addr); exit: return return_value; @@ -221,7 +221,7 @@ static PyObject * _socket_socket_inet_ntoa_impl(PySocketSockObject *self, Py_buffer *packed_ip); static PyObject * -_socket_socket_inet_ntoa(PySocketSockObject *self, PyObject *arg) +_socket_socket_inet_ntoa(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; Py_buffer packed_ip = {NULL, NULL}; @@ -229,7 +229,7 @@ _socket_socket_inet_ntoa(PySocketSockObject *self, PyObject *arg) if (PyObject_GetBuffer(arg, &packed_ip, PyBUF_SIMPLE) != 0) { goto exit; } - return_value = _socket_socket_inet_ntoa_impl(self, &packed_ip); + return_value = _socket_socket_inet_ntoa_impl((PySocketSockObject *)self, &packed_ip); exit: /* Cleanup for packed_ip */ @@ -257,7 +257,7 @@ static PyObject * _socket_socket_if_nametoindex_impl(PySocketSockObject *self, PyObject *oname); static PyObject * -_socket_socket_if_nametoindex(PySocketSockObject *self, PyObject *arg) 
+_socket_socket_if_nametoindex(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; PyObject *oname; @@ -265,7 +265,7 @@ _socket_socket_if_nametoindex(PySocketSockObject *self, PyObject *arg) if (!PyUnicode_FSConverter(arg, &oname)) { goto exit; } - return_value = _socket_socket_if_nametoindex_impl(self, oname); + return_value = _socket_socket_if_nametoindex_impl((PySocketSockObject *)self, oname); exit: return return_value; @@ -280,4 +280,4 @@ _socket_socket_if_nametoindex(PySocketSockObject *self, PyObject *arg) #ifndef _SOCKET_SOCKET_IF_NAMETOINDEX_METHODDEF #define _SOCKET_SOCKET_IF_NAMETOINDEX_METHODDEF #endif /* !defined(_SOCKET_SOCKET_IF_NAMETOINDEX_METHODDEF) */ -/*[clinic end generated code: output=3e612e8df1c322dd input=a9049054013a1b77]*/ +/*[clinic end generated code: output=d39efc30d811e74b input=a9049054013a1b77]*/ diff --git a/Modules/clinic/zlibmodule.c.h b/Modules/clinic/zlibmodule.c.h index 19906dc328d897..91a3ac76bcf0cc 100644 --- a/Modules/clinic/zlibmodule.c.h +++ b/Modules/clinic/zlibmodule.c.h @@ -439,7 +439,7 @@ zlib_Compress_compress_impl(compobject *self, PyTypeObject *cls, Py_buffer *data); static PyObject * -zlib_Compress_compress(compobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +zlib_Compress_compress(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -466,7 +466,7 @@ zlib_Compress_compress(compobject *self, PyTypeObject *cls, PyObject *const *arg if (PyObject_GetBuffer(args[0], &data, PyBUF_SIMPLE) != 0) { goto exit; } - return_value = zlib_Compress_compress_impl(self, cls, &data); + return_value = zlib_Compress_compress_impl((compobject *)self, cls, &data); exit: /* Cleanup for data */ @@ -502,7 +502,7 @@ zlib_Decompress_decompress_impl(compobject *self, PyTypeObject *cls, Py_buffer *data, Py_ssize_t max_length); static PyObject * -zlib_Decompress_decompress(compobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +zlib_Decompress_decompress(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -559,7 +559,7 @@ zlib_Decompress_decompress(compobject *self, PyTypeObject *cls, PyObject *const max_length = ival; } skip_optional_pos: - return_value = zlib_Decompress_decompress_impl(self, cls, &data, max_length); + return_value = zlib_Decompress_decompress_impl((compobject *)self, cls, &data, max_length); exit: /* Cleanup for data */ @@ -589,7 +589,7 @@ static PyObject * zlib_Compress_flush_impl(compobject *self, PyTypeObject *cls, int mode); static PyObject * -zlib_Compress_flush(compobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +zlib_Compress_flush(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -621,7 +621,7 @@ zlib_Compress_flush(compobject *self, PyTypeObject *cls, PyObject *const *args, goto exit; } skip_optional_posonly: - return_value = zlib_Compress_flush_impl(self, cls, mode); + return_value = zlib_Compress_flush_impl((compobject *)self, cls, mode); exit: return return_value; @@ -642,13 +642,13 @@ static PyObject * zlib_Compress_copy_impl(compobject *self, PyTypeObject *cls); static PyObject * 
-zlib_Compress_copy(compobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +zlib_Compress_copy(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "copy() takes no arguments"); return NULL; } - return zlib_Compress_copy_impl(self, cls); + return zlib_Compress_copy_impl((compobject *)self, cls); } #endif /* defined(HAVE_ZLIB_COPY) */ @@ -667,13 +667,13 @@ static PyObject * zlib_Compress___copy___impl(compobject *self, PyTypeObject *cls); static PyObject * -zlib_Compress___copy__(compobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +zlib_Compress___copy__(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "__copy__() takes no arguments"); return NULL; } - return zlib_Compress___copy___impl(self, cls); + return zlib_Compress___copy___impl((compobject *)self, cls); } #endif /* defined(HAVE_ZLIB_COPY) */ @@ -693,7 +693,7 @@ zlib_Compress___deepcopy___impl(compobject *self, PyTypeObject *cls, PyObject *memo); static PyObject * -zlib_Compress___deepcopy__(compobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +zlib_Compress___deepcopy__(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -718,7 +718,7 @@ zlib_Compress___deepcopy__(compobject *self, PyTypeObject *cls, PyObject *const goto exit; } memo = args[0]; - return_value = zlib_Compress___deepcopy___impl(self, cls, memo); + return_value = zlib_Compress___deepcopy___impl((compobject *)self, cls, memo); exit: return return_value; @@ -741,13 +741,13 @@ static PyObject * zlib_Decompress_copy_impl(compobject *self, PyTypeObject *cls); static PyObject * -zlib_Decompress_copy(compobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +zlib_Decompress_copy(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "copy() takes no arguments"); return NULL; } - return zlib_Decompress_copy_impl(self, cls); + return zlib_Decompress_copy_impl((compobject *)self, cls); } #endif /* defined(HAVE_ZLIB_COPY) */ @@ -766,13 +766,13 @@ static PyObject * zlib_Decompress___copy___impl(compobject *self, PyTypeObject *cls); static PyObject * -zlib_Decompress___copy__(compobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +zlib_Decompress___copy__(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "__copy__() takes no arguments"); return NULL; } - return zlib_Decompress___copy___impl(self, cls); + return zlib_Decompress___copy___impl((compobject *)self, cls); } #endif /* defined(HAVE_ZLIB_COPY) */ @@ -792,7 +792,7 @@ zlib_Decompress___deepcopy___impl(compobject *self, PyTypeObject *cls, PyObject *memo); static PyObject * -zlib_Decompress___deepcopy__(compobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +zlib_Decompress___deepcopy__(PyObject *self, PyTypeObject 
*cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -817,7 +817,7 @@ zlib_Decompress___deepcopy__(compobject *self, PyTypeObject *cls, PyObject *cons goto exit; } memo = args[0]; - return_value = zlib_Decompress___deepcopy___impl(self, cls, memo); + return_value = zlib_Decompress___deepcopy___impl((compobject *)self, cls, memo); exit: return return_value; @@ -842,7 +842,7 @@ zlib_Decompress_flush_impl(compobject *self, PyTypeObject *cls, Py_ssize_t length); static PyObject * -zlib_Decompress_flush(compobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +zlib_Decompress_flush(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -882,7 +882,7 @@ zlib_Decompress_flush(compobject *self, PyTypeObject *cls, PyObject *const *args length = ival; } skip_optional_posonly: - return_value = zlib_Decompress_flush_impl(self, cls, length); + return_value = zlib_Decompress_flush_impl((compobject *)self, cls, length); exit: return return_value; @@ -915,7 +915,7 @@ zlib_ZlibDecompressor_decompress_impl(ZlibDecompressor *self, Py_buffer *data, Py_ssize_t max_length); static PyObject * -zlib_ZlibDecompressor_decompress(ZlibDecompressor *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +zlib_ZlibDecompressor_decompress(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -972,7 +972,7 @@ zlib_ZlibDecompressor_decompress(ZlibDecompressor *self, PyObject *const *args, max_length = ival; } skip_optional_pos: - return_value = zlib_ZlibDecompressor_decompress_impl(self, &data, max_length); + return_value = zlib_ZlibDecompressor_decompress_impl((ZlibDecompressor *)self, &data, max_length); exit: /* Cleanup for data */ @@ -1109,4 +1109,4 @@ zlib_crc32(PyObject *module, PyObject *const *args, Py_ssize_t nargs) #ifndef ZLIB_DECOMPRESS___DEEPCOPY___METHODDEF #define ZLIB_DECOMPRESS___DEEPCOPY___METHODDEF #endif /* !defined(ZLIB_DECOMPRESS___DEEPCOPY___METHODDEF) */ -/*[clinic end generated code: output=2fef49f168842b17 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=969872868c303e8a input=a9049054013a1b77]*/ diff --git a/Modules/config.c.in b/Modules/config.c.in index 53b4fb285498d0..c578cd103dc629 100644 --- a/Modules/config.c.in +++ b/Modules/config.c.in @@ -1,13 +1,3 @@ -/* -*- C -*- *********************************************** -Copyright (c) 2000, BeOpen.com. -Copyright (c) 1995-2000, Corporation for National Research Initiatives. -Copyright (c) 1990-1995, Stichting Mathematisch Centrum. -All rights reserved. - -See the file "Misc/COPYRIGHT" for information on usage and -redistribution of this file, and for a DISCLAIMER OF ALL WARRANTIES. -******************************************************************/ - /* Module configuration */ /* !!! !!! !!! This file is edited by the makesetup script !!! !!! !!! 
*/ diff --git a/Modules/faulthandler.c b/Modules/faulthandler.c index b44b964b29484b..a15ced22677ab7 100644 --- a/Modules/faulthandler.c +++ b/Modules/faulthandler.c @@ -1346,7 +1346,7 @@ PyInit_faulthandler(void) static int faulthandler_init_enable(void) { - PyObject *enable = _PyImport_GetModuleAttrString("faulthandler", "enable"); + PyObject *enable = PyImport_ImportModuleAttrString("faulthandler", "enable"); if (enable == NULL) { return -1; } diff --git a/Modules/getpath.c b/Modules/getpath.c index 18ddfaf8dbce1a..e2478da021f511 100644 --- a/Modules/getpath.c +++ b/Modules/getpath.c @@ -17,10 +17,13 @@ #endif #ifdef __APPLE__ -# include <dlfcn.h> # include <mach-o/dyld.h> #endif +#ifdef HAVE_DLFCN_H +# include <dlfcn.h> +#endif + /* Reference the precompiled getpath.py */ #include "Python/frozen_modules/getpath.h" @@ -803,36 +806,25 @@ progname_to_dict(PyObject *dict, const char *key) static int library_to_dict(PyObject *dict, const char *key) { +/* macOS framework builds do not link against a libpython dynamic library, but + instead link against a macOS Framework. */ +#if defined(Py_ENABLE_SHARED) || defined(WITH_NEXT_FRAMEWORK) + #ifdef MS_WINDOWS -#ifdef Py_ENABLE_SHARED extern HMODULE PyWin_DLLhModule; if (PyWin_DLLhModule) { return winmodule_to_dict(dict, key, PyWin_DLLhModule); } #endif -#elif defined(WITH_NEXT_FRAMEWORK) - static char modPath[MAXPATHLEN + 1]; - static int modPathInitialized = -1; - if (modPathInitialized < 0) { - modPathInitialized = 0; - - /* On Mac OS X we have a special case if we're running from a framework. - This is because the python home should be set relative to the library, - which is in the framework, not relative to the executable, which may - be outside of the framework. Except when we're in the build - directory... */ - Dl_info pythonInfo; - if (dladdr(&Py_Initialize, &pythonInfo)) { - if (pythonInfo.dli_fname) { - strncpy(modPath, pythonInfo.dli_fname, MAXPATHLEN); - modPathInitialized = 1; - } - } - } - if (modPathInitialized > 0) { - return decode_to_dict(dict, key, modPath); + +#if HAVE_DLADDR + Dl_info libpython_info; + if (dladdr(&Py_Initialize, &libpython_info) && libpython_info.dli_fname) { + return decode_to_dict(dict, key, libpython_info.dli_fname); } #endif +#endif + return PyDict_SetItemString(dict, key, Py_None) == 0; } @@ -963,7 +955,7 @@ _PyConfig_InitPathConfig(PyConfig *config, int compute_path_config) ) { Py_DECREF(co); Py_DECREF(dict); - PyErr_FormatUnraisable("Exception ignored in preparing getpath"); + PyErr_FormatUnraisable("Exception ignored while preparing getpath"); return PyStatus_Error("error evaluating initial values"); } @@ -972,13 +964,13 @@ _PyConfig_InitPathConfig(PyConfig *config, int compute_path_config) if (!r) { Py_DECREF(dict); - PyErr_FormatUnraisable("Exception ignored in running getpath"); + PyErr_FormatUnraisable("Exception ignored while running getpath"); return PyStatus_Error("error evaluating path"); } Py_DECREF(r); if (_PyConfig_FromDict(config, configDict) < 0) { - PyErr_FormatUnraisable("Exception ignored in reading getpath results"); + PyErr_FormatUnraisable("Exception ignored while reading getpath results"); Py_DECREF(dict); return PyStatus_Error("error getting getpath results"); } diff --git a/Modules/getpath.py b/Modules/getpath.py index c34101e720851d..be2210345afbda 100644 --- a/Modules/getpath.py +++ b/Modules/getpath.py @@ -625,6 +625,8 @@ def search_up(prefix, *landmarks, test=isfile): # gh-100320: Our PYDs are assumed to be relative to the Lib directory # (that is, prefix) rather than the executable (that is, executable_dir)
exec_prefix = prefix + if not exec_prefix and prefix and isdir(joinpath(prefix, PLATSTDLIB_LANDMARK)): + exec_prefix = prefix if not exec_prefix and executable_dir: exec_prefix = search_up(executable_dir, PLATSTDLIB_LANDMARK, test=isdir) if not exec_prefix and EXEC_PREFIX: diff --git a/Modules/main.c b/Modules/main.c index 3bf2241f2837a3..5bb1de2d04d30c 100644 --- a/Modules/main.c +++ b/Modules/main.c @@ -370,7 +370,7 @@ pymain_run_file_obj(PyObject *program_name, PyObject *filename, return pymain_exit_err_print(); } - FILE *fp = _Py_fopen_obj(filename, "rb"); + FILE *fp = Py_fopen(filename, "rb"); if (fp == NULL) { // Ignore the OSError PyErr_Clear(); @@ -465,7 +465,7 @@ pymain_run_startup(PyConfig *config, int *exitcode) goto error; } - FILE *fp = _Py_fopen_obj(startup, "r"); + FILE *fp = Py_fopen(startup, "r"); if (fp == NULL) { int save_errno = errno; PyErr_Clear(); diff --git a/Modules/mathmodule.c b/Modules/mathmodule.c index 29638114dd94a9..b4c15a143f9838 100644 --- a/Modules/mathmodule.c +++ b/Modules/mathmodule.c @@ -858,12 +858,15 @@ math_lcm_impl(PyObject *module, PyObject * const *args, * true (1), but may return false (0) without setting up an exception. */ static int -is_error(double x) +is_error(double x, int raise_edom) { int result = 1; /* presumption of guilt */ assert(errno); /* non-zero errno is a precondition for calling */ - if (errno == EDOM) - PyErr_SetString(PyExc_ValueError, "math domain error"); + if (errno == EDOM) { + if (raise_edom) { + PyErr_SetString(PyExc_ValueError, "math domain error"); + } + } else if (errno == ERANGE) { /* ANSI C generally requires libm functions to set ERANGE @@ -928,7 +931,8 @@ is_error(double x) */ static PyObject * -math_1(PyObject *arg, double (*func) (double), int can_overflow) +math_1(PyObject *arg, double (*func) (double), int can_overflow, + const char *err_msg) { double x, r; x = PyFloat_AsDouble(arg); @@ -936,25 +940,34 @@ math_1(PyObject *arg, double (*func) (double), int can_overflow) return NULL; errno = 0; r = (*func)(x); - if (isnan(r) && !isnan(x)) { - PyErr_SetString(PyExc_ValueError, - "math domain error"); /* invalid arg */ - return NULL; - } + if (isnan(r) && !isnan(x)) + goto domain_err; /* domain error */ if (isinf(r) && isfinite(x)) { if (can_overflow) PyErr_SetString(PyExc_OverflowError, "math range error"); /* overflow */ else - PyErr_SetString(PyExc_ValueError, - "math domain error"); /* singularity */ + goto domain_err; /* singularity */ return NULL; } - if (isfinite(r) && errno && is_error(r)) + if (isfinite(r) && errno && is_error(r, 1)) /* this branch unnecessary on most platforms */ return NULL; return PyFloat_FromDouble(r); + +domain_err: + if (err_msg) { + char *buf = PyOS_double_to_string(x, 'r', 0, Py_DTSF_ADD_DOT_0, NULL); + if (buf) { + PyErr_Format(PyExc_ValueError, err_msg, buf); + PyMem_Free(buf); + } + } + else { + PyErr_SetString(PyExc_ValueError, "math domain error"); + } + return NULL; } /* variant of math_1, to be used when the function being wrapped is known to @@ -962,7 +975,7 @@ math_1(PyObject *arg, double (*func) (double), int can_overflow) errno = ERANGE for overflow). */ static PyObject * -math_1a(PyObject *arg, double (*func) (double)) +math_1a(PyObject *arg, double (*func) (double), const char *err_msg) { double x, r; x = PyFloat_AsDouble(arg); @@ -970,8 +983,17 @@ math_1a(PyObject *arg, double (*func) (double)) return NULL; errno = 0; r = (*func)(x); - if (errno && is_error(r)) + if (errno && is_error(r, err_msg ? 
0 : 1)) { + if (err_msg && errno == EDOM) { + assert(!PyErr_Occurred()); /* exception is not set by is_error() */ + char *buf = PyOS_double_to_string(x, 'r', 0, Py_DTSF_ADD_DOT_0, NULL); + if (buf) { + PyErr_Format(PyExc_ValueError, err_msg, buf); + PyMem_Free(buf); + } + } return NULL; + } return PyFloat_FromDouble(r); } @@ -1031,7 +1053,7 @@ math_2(PyObject *const *args, Py_ssize_t nargs, else errno = 0; } - if (errno && is_error(r)) + if (errno && is_error(r, 1)) return NULL; else return PyFloat_FromDouble(r); @@ -1039,13 +1061,25 @@ math_2(PyObject *const *args, Py_ssize_t nargs, #define FUNC1(funcname, func, can_overflow, docstring) \ static PyObject * math_##funcname(PyObject *self, PyObject *args) { \ - return math_1(args, func, can_overflow); \ + return math_1(args, func, can_overflow, NULL); \ + }\ + PyDoc_STRVAR(math_##funcname##_doc, docstring); + +#define FUNC1D(funcname, func, can_overflow, docstring, err_msg) \ + static PyObject * math_##funcname(PyObject *self, PyObject *args) { \ + return math_1(args, func, can_overflow, err_msg); \ }\ PyDoc_STRVAR(math_##funcname##_doc, docstring); #define FUNC1A(funcname, func, docstring) \ static PyObject * math_##funcname(PyObject *self, PyObject *args) { \ - return math_1a(args, func); \ + return math_1a(args, func, NULL); \ + }\ + PyDoc_STRVAR(math_##funcname##_doc, docstring); + +#define FUNC1AD(funcname, func, docstring, err_msg) \ + static PyObject * math_##funcname(PyObject *self, PyObject *args) { \ + return math_1a(args, func, err_msg); \ }\ PyDoc_STRVAR(math_##funcname##_doc, docstring); @@ -1077,9 +1111,10 @@ FUNC2(atan2, atan2, "atan2($module, y, x, /)\n--\n\n" "Return the arc tangent (measured in radians) of y/x.\n\n" "Unlike atan(y/x), the signs of both x and y are considered.") -FUNC1(atanh, atanh, 0, +FUNC1D(atanh, atanh, 0, "atanh($module, x, /)\n--\n\n" - "Return the inverse hyperbolic tangent of x.") + "Return the inverse hyperbolic tangent of x.", + "expected a number between -1 and 1, got %s") FUNC1(cbrt, cbrt, 0, "cbrt($module, x, /)\n--\n\n" "Return the cube root of x.") @@ -1190,9 +1225,10 @@ math_floor(PyObject *module, PyObject *number) return PyLong_FromDouble(floor(x)); } -FUNC1A(gamma, m_tgamma, +FUNC1AD(gamma, m_tgamma, "gamma($module, x, /)\n--\n\n" - "Gamma function at x.") + "Gamma function at x.", + "expected a float or nonnegative integer, got %s") FUNC1A(lgamma, m_lgamma, "lgamma($module, x, /)\n--\n\n" "Natural logarithm of absolute value of Gamma function at x.") @@ -1212,9 +1248,10 @@ FUNC1(sin, sin, 0, FUNC1(sinh, sinh, 1, "sinh($module, x, /)\n--\n\n" "Return the hyperbolic sine of x.") -FUNC1(sqrt, sqrt, 0, +FUNC1D(sqrt, sqrt, 0, "sqrt($module, x, /)\n--\n\n" - "Return the square root of x.") + "Return the square root of x.", + "expected a nonnegative input, got %s") FUNC1(tan, tan, 0, "tan($module, x, /)\n--\n\n" "Return the tangent of x (measured in radians).") @@ -2141,7 +2178,7 @@ math_ldexp_impl(PyObject *module, double x, PyObject *i) errno = ERANGE; } - if (errno && is_error(r)) + if (errno && is_error(r, 1)) return NULL; return PyFloat_FromDouble(r); } @@ -2195,8 +2232,8 @@ loghelper(PyObject* arg, double (*func)(double)) /* Negative or zero inputs give a ValueError. */ if (!_PyLong_IsPositive((PyLongObject *)arg)) { - PyErr_SetString(PyExc_ValueError, - "math domain error"); + PyErr_Format(PyExc_ValueError, + "expected a positive input, got %S", arg); return NULL; } @@ -2220,7 +2257,7 @@ loghelper(PyObject* arg, double (*func)(double)) } /* Else let libm handle it by itself. 
*/ - return math_1(arg, func, 0); + return math_1(arg, func, 0, "expected a positive input, got %s"); } @@ -2369,7 +2406,7 @@ math_fmod_impl(PyObject *module, double x, double y) else errno = 0; } - if (errno && is_error(r)) + if (errno && is_error(r, 1)) return NULL; else return PyFloat_FromDouble(r); @@ -3010,7 +3047,7 @@ math_pow_impl(PyObject *module, double x, double y) } } - if (errno && is_error(r)) + if (errno && is_error(r, 1)) return NULL; else return PyFloat_FromDouble(r); diff --git a/Modules/md5module.c b/Modules/md5module.c index ef9163e8be5b6c..d86c8e555012d7 100644 --- a/Modules/md5module.c +++ b/Modules/md5module.c @@ -54,6 +54,8 @@ typedef struct { Hacl_Hash_MD5_state_t *hash_state; } MD5object; +#define _MD5object_CAST(op) ((MD5object *)(op)) + #include "clinic/md5module.c.h" @@ -72,7 +74,7 @@ md5_get_state(PyObject *module) static MD5object * newMD5object(MD5State * st) { - MD5object *md5 = (MD5object *)PyObject_GC_New(MD5object, st->md5_type); + MD5object *md5 = PyObject_GC_New(MD5object, st->md5_type); if (!md5) { return NULL; } @@ -91,10 +93,11 @@ MD5_traverse(PyObject *ptr, visitproc visit, void *arg) } static void -MD5_dealloc(MD5object *ptr) +MD5_dealloc(PyObject *op) { + MD5object *ptr = _MD5object_CAST(op); Hacl_Hash_MD5_free(ptr->hash_state); - PyTypeObject *tp = Py_TYPE((PyObject*)ptr); + PyTypeObject *tp = Py_TYPE(op); PyObject_GC_UnTrack(ptr); PyObject_GC_Del(ptr); Py_DECREF(tp); @@ -224,36 +227,27 @@ static PyMethodDef MD5_methods[] = { }; static PyObject * -MD5_get_block_size(PyObject *self, void *closure) +MD5_get_block_size(PyObject *Py_UNUSED(self), void *Py_UNUSED(closure)) { return PyLong_FromLong(MD5_BLOCKSIZE); } static PyObject * -MD5_get_name(PyObject *self, void *closure) +MD5_get_name(PyObject *Py_UNUSED(self), void *Py_UNUSED(closure)) { return PyUnicode_FromStringAndSize("md5", 3); } static PyObject * -md5_get_digest_size(PyObject *self, void *closure) +md5_get_digest_size(PyObject *Py_UNUSED(self), void *Py_UNUSED(closure)) { return PyLong_FromLong(MD5_DIGESTSIZE); } static PyGetSetDef MD5_getseters[] = { - {"block_size", - (getter)MD5_get_block_size, NULL, - NULL, - NULL}, - {"name", - (getter)MD5_get_name, NULL, - NULL, - NULL}, - {"digest_size", - (getter)md5_get_digest_size, NULL, - NULL, - NULL}, + {"block_size", MD5_get_block_size, NULL, NULL, NULL}, + {"name", MD5_get_name, NULL, NULL, NULL}, + {"digest_size", md5_get_digest_size, NULL, NULL, NULL}, {NULL} /* Sentinel */ }; diff --git a/Modules/overlapped.c b/Modules/overlapped.c index 308a0dab7fab1a..806ebee7a70ff1 100644 --- a/Modules/overlapped.c +++ b/Modules/overlapped.c @@ -759,7 +759,8 @@ Overlapped_dealloc(OverlappedObject *self) PyExc_RuntimeError, "%R still has pending operation at " "deallocation, the process may crash", self); - PyErr_WriteUnraisable(NULL); + PyErr_FormatUnraisable("Exception ignored while deallocating " + "overlapped operation %R", self); } } diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c index 151d469983fafb..6dfe73017abf9d 100644 --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -7,6 +7,8 @@ of the compiler used. Different compilers define their own feature test macro, e.g. '_MSC_VER'. 
*/ +// --- Python includes ------------------------------------------------------ + #include "Python.h" #ifdef __VXWORKS__ @@ -26,255 +28,63 @@ #include "pycore_time.h" // _PyLong_FromTime_t() #include "pycore_typeobject.h" // _PyType_AddMethod() -#ifdef HAVE_UNISTD_H -# include // symlink() -#endif - -#ifdef MS_WINDOWS -# include -# if !defined(MS_WINDOWS_GAMES) || defined(MS_WINDOWS_DESKTOP) -# include -# endif -# include -# include // UNLEN -# include "osdefs.h" // SEP -# include // SetEntriesInAcl -# include // SDDL_REVISION_1 -# if defined(MS_WINDOWS_DESKTOP) || defined(MS_WINDOWS_SYSTEM) -# define HAVE_SYMLINK -# endif /* MS_WINDOWS_DESKTOP | MS_WINDOWS_SYSTEM */ -#endif - #ifndef MS_WINDOWS -# include "posixmodule.h" +# include "posixmodule.h" // _PyLong_FromUid() #else -# include "pycore_fileutils_windows.h" -# include "winreparse.h" +# include "pycore_fileutils_windows.h" // _Py_GetFileInformationByName() +# include "osdefs.h" // SEP +# include "winreparse.h" // _Py_REPARSE_DATA_BUFFER #endif -#if !defined(EX_OK) && defined(EXIT_SUCCESS) -# define EX_OK EXIT_SUCCESS + +// --- System includes ------------------------------------------------------ + +#include // ctermid() +#include // system() + +#ifdef HAVE_UNISTD_H +# include // symlink() #endif #ifdef __APPLE__ - /* Needed for the implementation of os.statvfs */ + /* Needed for the implementation of os.statvfs */ # include # include #endif -/* On android API level 21, 'AT_EACCESS' is not declared although - * HAVE_FACCESSAT is defined. */ -#ifdef __ANDROID__ -# undef HAVE_FACCESSAT -#endif - -#include // ctermid() -#include // system() #ifdef HAVE_SYS_TIME_H # include // futimes() #endif + #ifdef HAVE_SYS_PIDFD_H # include // PIDFD_NONBLOCK #endif - -// SGI apparently needs this forward declaration -#ifdef HAVE__GETPTY -# include // mode_t - extern char * _getpty(int *, int, mode_t, int); -#endif - #ifdef __EMSCRIPTEN__ -#include "emscripten.h" // emscripten_debugger() -#endif - -/* - * A number of APIs are available on macOS from a certain macOS version. - * To support building with a new SDK while deploying to older versions - * the availability test is split into two: - * - HAVE_: The configure check for compile time availability - * - HAVE__RUNTIME: Runtime check for availability - * - * The latter is always true when not on macOS, or when using a compiler - * that does not support __has_builtin (older versions of Xcode). - * - * Due to compiler restrictions there is one valid use of HAVE__RUNTIME: - * if (HAVE__RUNTIME) { ... } - * - * In mixing the test with other tests or using negations will result in compile - * errors. 
- */ -#if defined(__APPLE__) - -#include - -#if defined(__has_builtin) -#if __has_builtin(__builtin_available) -#define HAVE_BUILTIN_AVAILABLE 1 -#endif -#endif - -#ifdef HAVE_BUILTIN_AVAILABLE -# define HAVE_FSTATAT_RUNTIME __builtin_available(macOS 10.10, iOS 8.0, *) -# define HAVE_FACCESSAT_RUNTIME __builtin_available(macOS 10.10, iOS 8.0, *) -# define HAVE_FCHMODAT_RUNTIME __builtin_available(macOS 10.10, iOS 8.0, *) -# define HAVE_FCHOWNAT_RUNTIME __builtin_available(macOS 10.10, iOS 8.0, *) -# define HAVE_LINKAT_RUNTIME __builtin_available(macOS 10.10, iOS 8.0, *) -# define HAVE_FDOPENDIR_RUNTIME __builtin_available(macOS 10.10, iOS 8.0, *) -# define HAVE_MKDIRAT_RUNTIME __builtin_available(macOS 10.10, iOS 8.0, *) -# define HAVE_RENAMEAT_RUNTIME __builtin_available(macOS 10.10, iOS 8.0, *) -# define HAVE_UNLINKAT_RUNTIME __builtin_available(macOS 10.10, iOS 8.0, *) -# define HAVE_OPENAT_RUNTIME __builtin_available(macOS 10.10, iOS 8.0, *) -# define HAVE_READLINKAT_RUNTIME __builtin_available(macOS 10.10, iOS 8.0, *) -# define HAVE_SYMLINKAT_RUNTIME __builtin_available(macOS 10.10, iOS 8.0, *) -# define HAVE_FUTIMENS_RUNTIME __builtin_available(macOS 10.13, iOS 11.0, tvOS 11.0, watchOS 4.0, *) -# define HAVE_UTIMENSAT_RUNTIME __builtin_available(macOS 10.13, iOS 11.0, tvOS 11.0, watchOS 4.0, *) -# define HAVE_PWRITEV_RUNTIME __builtin_available(macOS 11.0, iOS 14.0, tvOS 14.0, watchOS 7.0, *) -# define HAVE_MKFIFOAT_RUNTIME __builtin_available(macOS 13.0, iOS 16.0, tvOS 16.0, watchOS 9.0, *) -# define HAVE_MKNODAT_RUNTIME __builtin_available(macOS 13.0, iOS 16.0, tvOS 16.0, watchOS 9.0, *) -# define HAVE_PTSNAME_R_RUNTIME __builtin_available(macOS 10.13.4, iOS 11.3, tvOS 11.3, watchOS 4.3, *) - -# define HAVE_POSIX_SPAWN_SETSID_RUNTIME __builtin_available(macOS 10.15, *) - -#else /* Xcode 8 or earlier */ - - /* __builtin_available is not present in these compilers, but - * some of the symbols might be weak linked (10.10 SDK or later - * deploying on 10.9. - * - * Fall back to the older style of availability checking for - * symbols introduced in macOS 10.10. 
- */ - -# ifdef HAVE_FSTATAT -# define HAVE_FSTATAT_RUNTIME (fstatat != NULL) -# endif - -# ifdef HAVE_FACCESSAT -# define HAVE_FACCESSAT_RUNTIME (faccessat != NULL) -# endif - -# ifdef HAVE_FCHMODAT -# define HAVE_FCHMODAT_RUNTIME (fchmodat != NULL) -# endif - -# ifdef HAVE_FCHOWNAT -# define HAVE_FCHOWNAT_RUNTIME (fchownat != NULL) -# endif - -# ifdef HAVE_LINKAT -# define HAVE_LINKAT_RUNTIME (linkat != NULL) -# endif - -# ifdef HAVE_FDOPENDIR -# define HAVE_FDOPENDIR_RUNTIME (fdopendir != NULL) -# endif - -# ifdef HAVE_MKDIRAT -# define HAVE_MKDIRAT_RUNTIME (mkdirat != NULL) -# endif - -# ifdef HAVE_RENAMEAT -# define HAVE_RENAMEAT_RUNTIME (renameat != NULL) -# endif - -# ifdef HAVE_UNLINKAT -# define HAVE_UNLINKAT_RUNTIME (unlinkat != NULL) -# endif - -# ifdef HAVE_OPENAT -# define HAVE_OPENAT_RUNTIME (openat != NULL) -# endif - -# ifdef HAVE_READLINKAT -# define HAVE_READLINKAT_RUNTIME (readlinkat != NULL) -# endif - -# ifdef HAVE_SYMLINKAT -# define HAVE_SYMLINKAT_RUNTIME (symlinkat != NULL) -# endif - -# ifdef HAVE_UTIMENSAT -# define HAVE_UTIMENSAT_RUNTIME (utimensat != NULL) -# endif - -# ifdef HAVE_FUTIMENS -# define HAVE_FUTIMENS_RUNTIME (futimens != NULL) -# endif - -# ifdef HAVE_PWRITEV -# define HAVE_PWRITEV_RUNTIME (pwritev != NULL) -# endif - -# ifdef HAVE_MKFIFOAT -# define HAVE_MKFIFOAT_RUNTIME (mkfifoat != NULL) -# endif - -# ifdef HAVE_MKNODAT -# define HAVE_MKNODAT_RUNTIME (mknodat != NULL) -# endif - -# ifdef HAVE_PTSNAME_R -# define HAVE_PTSNAME_R_RUNTIME (ptsname_r != NULL) -# endif - -#endif - -#ifdef HAVE_FUTIMESAT -/* Some of the logic for weak linking depends on this assertion */ -# error "HAVE_FUTIMESAT unexpectedly defined" +# include "emscripten.h" // emscripten_debugger() #endif -#else -# define HAVE_FSTATAT_RUNTIME 1 -# define HAVE_FACCESSAT_RUNTIME 1 -# define HAVE_FCHMODAT_RUNTIME 1 -# define HAVE_FCHOWNAT_RUNTIME 1 -# define HAVE_LINKAT_RUNTIME 1 -# define HAVE_FDOPENDIR_RUNTIME 1 -# define HAVE_MKDIRAT_RUNTIME 1 -# define HAVE_RENAMEAT_RUNTIME 1 -# define HAVE_UNLINKAT_RUNTIME 1 -# define HAVE_OPENAT_RUNTIME 1 -# define HAVE_READLINKAT_RUNTIME 1 -# define HAVE_SYMLINKAT_RUNTIME 1 -# define HAVE_FUTIMENS_RUNTIME 1 -# define HAVE_UTIMENSAT_RUNTIME 1 -# define HAVE_PWRITEV_RUNTIME 1 -# define HAVE_MKFIFOAT_RUNTIME 1 -# define HAVE_MKNODAT_RUNTIME 1 -# define HAVE_PTSNAME_R_RUNTIME 1 -#endif - - -PyDoc_STRVAR(posix__doc__, -"This module provides access to operating system functionality that is\n\ -standardized by the C Standard and the POSIX standard (a thinly\n\ -disguised Unix interface). 
Refer to the library manual and\n\ -corresponding Unix manual entries for more information on calls."); - - #ifdef HAVE_SYS_UIO_H # include #endif #ifdef HAVE_SYS_TYPES_H -/* Should be included before on HP-UX v3 */ + /* Should be included before on HP-UX v3 */ # include -#endif /* HAVE_SYS_TYPES_H */ - +#endif #ifdef HAVE_SYS_SYSMACROS_H -/* GNU C Library: major(), minor(), makedev() */ + /* GNU C Library: major(), minor(), makedev() */ # include #endif #ifdef HAVE_SYS_STAT_H # include -#endif /* HAVE_SYS_STAT_H */ +#endif #ifdef HAVE_SYS_WAIT_H # include // WNOHANG #endif + #ifdef HAVE_LINUX_WAIT_H # include // P_PIDFD #endif @@ -284,54 +94,34 @@ corresponding Unix manual entries for more information on calls."); #endif #ifdef HAVE_FCNTL_H -# include +# include // fcntl() #endif #ifdef HAVE_GRP_H -# include +# include // setgroups() #endif #ifdef HAVE_SYSEXITS_H -# include +# include // EX_OK #endif #ifdef HAVE_SYS_LOADAVG_H -# include +# include // getloadavg() #endif #ifdef HAVE_SYS_SENDFILE_H -# include +# include // sendfile() #endif #if defined(__APPLE__) -# include +# include // fcopyfile() #endif #ifdef HAVE_SCHED_H -# include +# include // sched_setscheduler() #endif - #ifdef HAVE_LINUX_SCHED_H -# include -#endif - -#if !defined(CPU_ALLOC) && defined(HAVE_SCHED_SETAFFINITY) -# undef HAVE_SCHED_SETAFFINITY -#endif - -#if defined(HAVE_SYS_XATTR_H) -# if defined(HAVE_LINUX_LIMITS_H) && !defined(__FreeBSD_kernel__) && !defined(__GNU__) -# define USE_XATTRS -# include // Needed for XATTR_SIZE_MAX on musl libc. -# endif -# if defined(__CYGWIN__) -# define USE_XATTRS -# include // Needed for XATTR_SIZE_MAX and XATTR_LIST_MAX. -# endif -#endif - -#ifdef USE_XATTRS -# include +# include // SCHED_IDLE, SCHED_RR #endif #if defined(__FreeBSD__) || defined(__DragonFly__) || defined(__APPLE__) @@ -357,23 +147,128 @@ corresponding Unix manual entries for more information on calls."); #endif #ifdef HAVE_LINUX_RANDOM_H -# include +# include // GRND_RANDOM #endif #ifdef HAVE_GETRANDOM_SYSCALL -# include +# include // syscall() +#endif + +#ifdef HAVE_POSIX_SPAWN +# include // posix_spawn() +#endif + +#ifdef HAVE_UTIME_H +# include // utime() +#endif + +#ifdef HAVE_SYS_UTIME_H +# include +# define HAVE_UTIME_H /* pretend we do for the rest of this file */ +#endif + +#ifdef HAVE_SYS_TIMES_H +# include // times() +#endif + +#ifdef HAVE_SYS_PARAM_H +# include +#endif + +#ifdef HAVE_SYS_UTSNAME_H +# include // uname() +#endif + +/* memfd_create is either defined in sys/mman.h or sys/memfd.h + * linux/memfd.h defines additional flags + */ +#ifdef HAVE_SYS_MMAN_H +# include // memfd_create() +#endif +#ifdef HAVE_SYS_MEMFD_H +# include // memfd_create() +#endif +#ifdef HAVE_LINUX_MEMFD_H +# include // memfd_create(), MFD_CLOEXEC +#endif + +#ifdef HAVE_SYS_EVENTFD_H +# include // eventfd() +#endif + +#ifdef HAVE_SYS_TIMERFD_H +# include // timerfd_create() #endif +#ifdef _Py_MEMORY_SANITIZER +# include // __msan_unpoison() +#endif + + +// --- More complex system includes ----------------------------------------- + +#ifdef MS_WINDOWS +# include +# if !defined(MS_WINDOWS_GAMES) || defined(MS_WINDOWS_DESKTOP) +# include // PathCchSkipRoot() +# endif +# include // SetEntriesInAcl +# include // UNLEN +# include // SDDL_REVISION_1 +# include // FSCTL_GET_REPARSE_POINT +# if defined(MS_WINDOWS_DESKTOP) || defined(MS_WINDOWS_SYSTEM) +# define HAVE_SYMLINK +# endif /* MS_WINDOWS_DESKTOP | MS_WINDOWS_SYSTEM */ +#endif + + +#ifdef _MSC_VER +# ifdef HAVE_DIRECT_H +# include +# endif +# ifdef HAVE_IO_H +# include +# 
endif +# ifdef HAVE_PROCESS_H +# include // getpid(), _cwait() +# endif +# include +#endif /* _MSC_VER */ + + +#ifdef HAVE__GETPTY +# include // mode_t + // SGI apparently needs this forward declaration + extern char * _getpty(int *, int, mode_t, int); +#endif + + +#if defined(HAVE_SYS_XATTR_H) +# if defined(HAVE_LINUX_LIMITS_H) && !defined(__FreeBSD_kernel__) && !defined(__GNU__) +# define USE_XATTRS +# include // Needed for XATTR_SIZE_MAX on musl libc. +# endif +# if defined(__CYGWIN__) +# define USE_XATTRS +# include // Needed for XATTR_SIZE_MAX and XATTR_LIST_MAX. +# endif +#endif +#ifdef USE_XATTRS +# include // fgetxattr() +#endif + + #ifdef HAVE_WINDOWS_CONSOLE_IO # define TERMSIZE_USE_CONIO #elif defined(HAVE_SYS_IOCTL_H) -# include +# include // ioctl(), TIOCGWINSZ # if defined(HAVE_TERMIOS_H) # include # endif # if defined(TIOCGWINSZ) # define TERMSIZE_USE_IOCTL # endif -#endif /* HAVE_WINDOWS_CONSOLE_IO */ +#endif + /* Various compilers have only certain posix functions */ /* XXX Gosh I wish these were all moved into pyconfig.h */ @@ -400,23 +295,15 @@ corresponding Unix manual entries for more information on calls."); # define HAVE_PIPE 1 # define HAVE_FSYNC 1 # define fsync _commit -#endif /* ! __WATCOMC__ || __QNX__ */ - -/*[clinic input] -# one of the few times we lie about this name! -module os -[clinic start generated code]*/ -/*[clinic end generated code: output=da39a3ee5e6b4b0d input=94a0f0f978acae17]*/ +#endif -#ifndef _MSC_VER -#if defined(__sgi)&&_COMPILER_VERSION>=700 +#if !defined(_MSC_VER) && defined(__sgi) && _COMPILER_VERSION>=700 /* declare ctermid_r if compiling with MIPSPro 7.x in ANSI C mode (default) */ -extern char *ctermid_r(char *); +extern char *ctermid_r(char *); #endif -#endif /* !_MSC_VER */ #if defined(__VXWORKS__) # include @@ -430,33 +317,9 @@ extern char *ctermid_r(char *); # endif #endif /* __VXWORKS__ */ -#ifdef HAVE_POSIX_SPAWN -# include -#endif - -#ifdef HAVE_UTIME_H -# include -#endif /* HAVE_UTIME_H */ - -#ifdef HAVE_SYS_UTIME_H -# include -# define HAVE_UTIME_H /* pretend we do for the rest of this file */ -#endif /* HAVE_SYS_UTIME_H */ - -#ifdef HAVE_SYS_TIMES_H -# include -#endif /* HAVE_SYS_TIMES_H */ - -#ifdef HAVE_SYS_PARAM_H -# include -#endif /* HAVE_SYS_PARAM_H */ - -#ifdef HAVE_SYS_UTSNAME_H -# include -#endif /* HAVE_SYS_UTSNAME_H */ #ifdef HAVE_DIRENT_H -# include +# include // opendir() # define NAMLEN(dirent) strlen((dirent)->d_name) #else # if defined(__WATCOMC__) && !defined(__QNX__) @@ -477,18 +340,20 @@ extern char *ctermid_r(char *); # endif #endif -#ifdef _MSC_VER -# ifdef HAVE_DIRECT_H -# include -# endif -# ifdef HAVE_IO_H -# include + +#if defined(MAJOR_IN_MKDEV) +# include +#else +# if defined(MAJOR_IN_SYSMACROS) +# include # endif -# ifdef HAVE_PROCESS_H -# include +# if defined(HAVE_MKNOD) && defined(HAVE_SYS_MKDEV_H) +# include # endif -# include -#endif /* _MSC_VER */ +#endif + + +// --- Macros --------------------------------------------------------------- #ifndef MAXPATHLEN # if defined(PATH_MAX) && PATH_MAX > 1024 @@ -498,6 +363,7 @@ extern char *ctermid_r(char *); # endif #endif /* MAXPATHLEN */ + #ifdef UNION_WAIT /* Emulate some macros on systems that have a union instead of macros */ # ifndef WIFEXITED @@ -517,12 +383,14 @@ extern char *ctermid_r(char *); # define WAIT_STATUS_INT(s) (s) #endif /* UNION_WAIT */ + /* Don't use the "_r" form if we don't need it (also, won't have a prototype for it, at least on Solaris -- maybe others as well?). 
*/ #if defined(HAVE_CTERMID_R) # define USE_CTERMID_R #endif + /* choose the appropriate stat and fstat functions and return structs */ #undef STAT #undef FSTAT @@ -539,25 +407,19 @@ extern char *ctermid_r(char *); # define STRUCT_STAT struct stat #endif -#if defined(MAJOR_IN_MKDEV) -# include -#else -# if defined(MAJOR_IN_SYSMACROS) -# include -# endif -# if defined(HAVE_MKNOD) && defined(HAVE_SYS_MKDEV_H) -# include -# endif + +#if !defined(EX_OK) && defined(EXIT_SUCCESS) +# define EX_OK EXIT_SUCCESS #endif -#ifdef MS_WINDOWS -# define INITFUNC PyInit_nt -# define MODNAME "nt" -# define MODNAME_OBJ &_Py_ID(nt) -#else -# define INITFUNC PyInit_posix -# define MODNAME "posix" -# define MODNAME_OBJ &_Py_ID(posix) +#if !defined(CPU_ALLOC) && defined(HAVE_SCHED_SETAFFINITY) +# undef HAVE_SCHED_SETAFFINITY +#endif + +/* On android API level 21, 'AT_EACCESS' is not declared although + * HAVE_FACCESSAT is defined. */ +#ifdef __ANDROID__ +# undef HAVE_FACCESSAT #endif #if defined(__sun) @@ -565,33 +427,195 @@ extern char *ctermid_r(char *); # define HAVE_STRUCT_STAT_ST_FSTYPE 1 #endif -/* memfd_create is either defined in sys/mman.h or sys/memfd.h - * linux/memfd.h defines additional flags + +// --- Apple __builtin_available() macros ----------------------------------- + +/* + * A number of APIs are available on macOS from a certain macOS version. + * To support building with a new SDK while deploying to older versions + * the availability test is split into two: + * - HAVE_: The configure check for compile time availability + * - HAVE__RUNTIME: Runtime check for availability + * + * The latter is always true when not on macOS, or when using a compiler + * that does not support __has_builtin (older versions of Xcode). + * + * Due to compiler restrictions there is one valid use of HAVE__RUNTIME: + * if (HAVE__RUNTIME) { ... } + * + * In mixing the test with other tests or using negations will result in compile + * errors. 
*/ -#ifdef HAVE_SYS_MMAN_H -# include +#if defined(__APPLE__) + +#include + +#if defined(__has_builtin) +#if __has_builtin(__builtin_available) +#define HAVE_BUILTIN_AVAILABLE 1 #endif -#ifdef HAVE_SYS_MEMFD_H -# include #endif -#ifdef HAVE_LINUX_MEMFD_H -# include + +#ifdef HAVE_BUILTIN_AVAILABLE +# define HAVE_FSTATAT_RUNTIME __builtin_available(macOS 10.10, iOS 8.0, *) +# define HAVE_FACCESSAT_RUNTIME __builtin_available(macOS 10.10, iOS 8.0, *) +# define HAVE_FCHMODAT_RUNTIME __builtin_available(macOS 10.10, iOS 8.0, *) +# define HAVE_FCHOWNAT_RUNTIME __builtin_available(macOS 10.10, iOS 8.0, *) +# define HAVE_LINKAT_RUNTIME __builtin_available(macOS 10.10, iOS 8.0, *) +# define HAVE_FDOPENDIR_RUNTIME __builtin_available(macOS 10.10, iOS 8.0, *) +# define HAVE_MKDIRAT_RUNTIME __builtin_available(macOS 10.10, iOS 8.0, *) +# define HAVE_RENAMEAT_RUNTIME __builtin_available(macOS 10.10, iOS 8.0, *) +# define HAVE_UNLINKAT_RUNTIME __builtin_available(macOS 10.10, iOS 8.0, *) +# define HAVE_OPENAT_RUNTIME __builtin_available(macOS 10.10, iOS 8.0, *) +# define HAVE_READLINKAT_RUNTIME __builtin_available(macOS 10.10, iOS 8.0, *) +# define HAVE_SYMLINKAT_RUNTIME __builtin_available(macOS 10.10, iOS 8.0, *) +# define HAVE_FUTIMENS_RUNTIME __builtin_available(macOS 10.13, iOS 11.0, tvOS 11.0, watchOS 4.0, *) +# define HAVE_UTIMENSAT_RUNTIME __builtin_available(macOS 10.13, iOS 11.0, tvOS 11.0, watchOS 4.0, *) +# define HAVE_PWRITEV_RUNTIME __builtin_available(macOS 11.0, iOS 14.0, tvOS 14.0, watchOS 7.0, *) +# define HAVE_MKFIFOAT_RUNTIME __builtin_available(macOS 13.0, iOS 16.0, tvOS 16.0, watchOS 9.0, *) +# define HAVE_MKNODAT_RUNTIME __builtin_available(macOS 13.0, iOS 16.0, tvOS 16.0, watchOS 9.0, *) +# define HAVE_PTSNAME_R_RUNTIME __builtin_available(macOS 10.13.4, iOS 11.3, tvOS 11.3, watchOS 4.3, *) + +# define HAVE_POSIX_SPAWN_SETSID_RUNTIME __builtin_available(macOS 10.15, *) + +#else /* Xcode 8 or earlier */ + + /* __builtin_available is not present in these compilers, but + * some of the symbols might be weak linked (10.10 SDK or later + * deploying on 10.9. + * + * Fall back to the older style of availability checking for + * symbols introduced in macOS 10.10. 
+ */ + +# ifdef HAVE_FSTATAT +# define HAVE_FSTATAT_RUNTIME (fstatat != NULL) +# endif + +# ifdef HAVE_FACCESSAT +# define HAVE_FACCESSAT_RUNTIME (faccessat != NULL) +# endif + +# ifdef HAVE_FCHMODAT +# define HAVE_FCHMODAT_RUNTIME (fchmodat != NULL) +# endif + +# ifdef HAVE_FCHOWNAT +# define HAVE_FCHOWNAT_RUNTIME (fchownat != NULL) +# endif + +# ifdef HAVE_LINKAT +# define HAVE_LINKAT_RUNTIME (linkat != NULL) +# endif + +# ifdef HAVE_FDOPENDIR +# define HAVE_FDOPENDIR_RUNTIME (fdopendir != NULL) +# endif + +# ifdef HAVE_MKDIRAT +# define HAVE_MKDIRAT_RUNTIME (mkdirat != NULL) +# endif + +# ifdef HAVE_RENAMEAT +# define HAVE_RENAMEAT_RUNTIME (renameat != NULL) +# endif + +# ifdef HAVE_UNLINKAT +# define HAVE_UNLINKAT_RUNTIME (unlinkat != NULL) +# endif + +# ifdef HAVE_OPENAT +# define HAVE_OPENAT_RUNTIME (openat != NULL) +# endif + +# ifdef HAVE_READLINKAT +# define HAVE_READLINKAT_RUNTIME (readlinkat != NULL) +# endif + +# ifdef HAVE_SYMLINKAT +# define HAVE_SYMLINKAT_RUNTIME (symlinkat != NULL) +# endif + +# ifdef HAVE_UTIMENSAT +# define HAVE_UTIMENSAT_RUNTIME (utimensat != NULL) +# endif + +# ifdef HAVE_FUTIMENS +# define HAVE_FUTIMENS_RUNTIME (futimens != NULL) +# endif + +# ifdef HAVE_PWRITEV +# define HAVE_PWRITEV_RUNTIME (pwritev != NULL) +# endif + +# ifdef HAVE_MKFIFOAT +# define HAVE_MKFIFOAT_RUNTIME (mkfifoat != NULL) +# endif + +# ifdef HAVE_MKNODAT +# define HAVE_MKNODAT_RUNTIME (mknodat != NULL) +# endif + +# ifdef HAVE_PTSNAME_R +# define HAVE_PTSNAME_R_RUNTIME (ptsname_r != NULL) +# endif + #endif -/* eventfd() */ -#ifdef HAVE_SYS_EVENTFD_H -# include +#ifdef HAVE_FUTIMESAT +/* Some of the logic for weak linking depends on this assertion */ +# error "HAVE_FUTIMESAT unexpectedly defined" #endif -/* timerfd_create() */ -#ifdef HAVE_SYS_TIMERFD_H -# include +#else +# define HAVE_FSTATAT_RUNTIME 1 +# define HAVE_FACCESSAT_RUNTIME 1 +# define HAVE_FCHMODAT_RUNTIME 1 +# define HAVE_FCHOWNAT_RUNTIME 1 +# define HAVE_LINKAT_RUNTIME 1 +# define HAVE_FDOPENDIR_RUNTIME 1 +# define HAVE_MKDIRAT_RUNTIME 1 +# define HAVE_RENAMEAT_RUNTIME 1 +# define HAVE_UNLINKAT_RUNTIME 1 +# define HAVE_OPENAT_RUNTIME 1 +# define HAVE_READLINKAT_RUNTIME 1 +# define HAVE_SYMLINKAT_RUNTIME 1 +# define HAVE_FUTIMENS_RUNTIME 1 +# define HAVE_UTIMENSAT_RUNTIME 1 +# define HAVE_PWRITEV_RUNTIME 1 +# define HAVE_MKFIFOAT_RUNTIME 1 +# define HAVE_MKNODAT_RUNTIME 1 +# define HAVE_PTSNAME_R_RUNTIME 1 #endif -#ifdef _Py_MEMORY_SANITIZER -# include + +// --- os module ------------------------------------------------------------ + +#ifdef MS_WINDOWS +# define INITFUNC PyInit_nt +# define MODNAME "nt" +# define MODNAME_OBJ &_Py_ID(nt) +#else +# define INITFUNC PyInit_posix +# define MODNAME "posix" +# define MODNAME_OBJ &_Py_ID(posix) #endif +/*[clinic input] +# one of the few times we lie about this name! +module os +[clinic start generated code]*/ +/*[clinic end generated code: output=da39a3ee5e6b4b0d input=94a0f0f978acae17]*/ + +PyDoc_STRVAR(posix__doc__, +"This module provides access to operating system functionality that is\n\ +standardized by the C Standard and the POSIX standard (a thinly\n\ +disguised Unix interface). Refer to the library manual and\n\ +corresponding Unix manual entries for more information on calls."); + + +// --- Functions ------------------------------------------------------------ + #ifdef HAVE_FORK static void run_at_forkers(PyObject *lst, int reverse) @@ -606,8 +630,10 @@ run_at_forkers(PyObject *lst, int reverse) * one of the callbacks. 
*/ cpy = PyList_GetSlice(lst, 0, PyList_GET_SIZE(lst)); - if (cpy == NULL) - PyErr_WriteUnraisable(lst); + if (cpy == NULL) { + PyErr_FormatUnraisable("Exception ignored in atfork callback " + "while copying list %R", lst); + } else { if (reverse) PyList_Reverse(cpy); @@ -615,10 +641,13 @@ run_at_forkers(PyObject *lst, int reverse) PyObject *func, *res; func = PyList_GET_ITEM(cpy, i); res = _PyObject_CallNoArgs(func); - if (res == NULL) - PyErr_WriteUnraisable(func); - else + if (res == NULL) { + PyErr_FormatUnraisable("Exception ignored " + "in atfork callback %R", func); + } + else { Py_DECREF(res); + } } Py_DECREF(cpy); } @@ -3347,7 +3376,7 @@ os_access_impl(PyObject *module, path_t *path, int mode, int dir_fd, #endif -#ifdef HAVE_TTYNAME +#ifdef HAVE_TTYNAME_R /*[clinic input] os.ttyname @@ -9582,42 +9611,33 @@ os_kill_impl(PyObject *module, pid_t pid, Py_ssize_t signal) Py_RETURN_NONE; #else /* !MS_WINDOWS */ - PyObject *result; DWORD sig = (DWORD)signal; - DWORD err; - HANDLE handle; #ifdef HAVE_WINDOWS_CONSOLE_IO /* Console processes which share a common console can be sent CTRL+C or CTRL+BREAK events, provided they handle said events. */ if (sig == CTRL_C_EVENT || sig == CTRL_BREAK_EVENT) { if (GenerateConsoleCtrlEvent(sig, (DWORD)pid) == 0) { - err = GetLastError(); - PyErr_SetFromWindowsErr(err); - } - else { - Py_RETURN_NONE; + return PyErr_SetFromWindowsErr(0); } + Py_RETURN_NONE; } #endif /* HAVE_WINDOWS_CONSOLE_IO */ /* If the signal is outside of what GenerateConsoleCtrlEvent can use, attempt to open and terminate the process. */ - handle = OpenProcess(PROCESS_ALL_ACCESS, FALSE, (DWORD)pid); + HANDLE handle = OpenProcess(PROCESS_ALL_ACCESS, FALSE, (DWORD)pid); if (handle == NULL) { - err = GetLastError(); - return PyErr_SetFromWindowsErr(err); + return PyErr_SetFromWindowsErr(0); } - if (TerminateProcess(handle, sig) == 0) { - err = GetLastError(); - result = PyErr_SetFromWindowsErr(err); - } else { - result = Py_NewRef(Py_None); + BOOL res = TerminateProcess(handle, sig); + CloseHandle(handle); + if (res == 0) { + return PyErr_SetFromWindowsErr(0); } - CloseHandle(handle); - return result; + Py_RETURN_NONE; #endif /* !MS_WINDOWS */ } #endif /* HAVE_KILL */ @@ -9886,7 +9906,7 @@ wait_helper(PyObject *module, pid_t pid, int status, struct rusage *ru) memset(ru, 0, sizeof(*ru)); } - struct_rusage = _PyImport_GetModuleAttrString("resource", "struct_rusage"); + struct_rusage = PyImport_ImportModuleAttrString("resource", "struct_rusage"); if (struct_rusage == NULL) return NULL; @@ -11442,6 +11462,38 @@ os_read_impl(PyObject *module, int fd, Py_ssize_t length) return buffer; } +/*[clinic input] +os.readinto -> Py_ssize_t + fd: int + buffer: Py_buffer(accept={rwbuffer}) + / + +Read into a buffer object from a file descriptor. + +The buffer should be mutable and bytes-like. On success, returns the number of +bytes read. Less bytes may be read than the size of the buffer. The underlying +system call will be retried when interrupted by a signal, unless the signal +handler raises an exception. Other errors will not be retried and an error will +be raised. + +Returns 0 if *fd* is at end of file or if the provided *buffer* has length 0 +(which can be used to check for errors without reading data). Never returns +negative. 
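As a usage sketch only (the file name is hypothetical), the new os.readinto() described above reads directly into a caller-supplied buffer:

import os

fd = os.open("data.bin", os.O_RDONLY)    # hypothetical input file
try:
    buf = bytearray(4096)                # mutable, bytes-like buffer
    n = os.readinto(fd, buf)             # bytes actually read; 0 at EOF
    payload = bytes(buf[:n])
finally:
    os.close(fd)
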
+[clinic start generated code]*/ + +static Py_ssize_t +os_readinto_impl(PyObject *module, int fd, Py_buffer *buffer) +/*[clinic end generated code: output=8091a3513c683a80 input=d40074d0a68de575]*/ +{ + assert(buffer->len >= 0); + Py_ssize_t result = _Py_read(fd, buffer->buf, buffer->len); + /* Ensure negative is never returned without an error. Simplifies calling + code. _Py_read should succeed, possibly reading 0 bytes, _or_ set an + error. */ + assert(result >= 0 || (result == -1 && PyErr_Occurred())); + return result; +} + #if (defined(HAVE_SENDFILE) && (defined(__FreeBSD__) || defined(__DragonFly__) \ || defined(__APPLE__))) \ || defined(HAVE_READV) || defined(HAVE_PREADV) || defined (HAVE_PREADV2) \ @@ -16307,7 +16359,8 @@ ScandirIterator_finalize(ScandirIterator *iterator) "unclosed scandir iterator %R", iterator)) { /* Spurious errors can appear at shutdown */ if (PyErr_ExceptionMatches(PyExc_Warning)) { - PyErr_WriteUnraisable((PyObject *) iterator); + PyErr_FormatUnraisable("Exception ignored while finalizing " + "scandir iterator %R", iterator); } } } @@ -16982,6 +17035,7 @@ static PyMethodDef posix_methods[] = { OS_LOCKF_METHODDEF OS_LSEEK_METHODDEF OS_READ_METHODDEF + OS_READINTO_METHODDEF OS_READV_METHODDEF OS_PREAD_METHODDEF OS_PREADV_METHODDEF diff --git a/Modules/pyexpat.c b/Modules/pyexpat.c index 9931ca2a8d4749..3290706f143b9a 100644 --- a/Modules/pyexpat.c +++ b/Modules/pyexpat.c @@ -1948,7 +1948,8 @@ pyexpat_capsule_destructor(PyObject *capsule) { void *p = PyCapsule_GetPointer(capsule, PyExpat_CAPSULE_NAME); if (p == NULL) { - PyErr_WriteUnraisable(capsule); + PyErr_FormatUnraisable("Exception ignored while destroying " + "pyexact capsule"); return; } PyMem_Free(p); diff --git a/Modules/selectmodule.c b/Modules/selectmodule.c index e14e114a6dafd0..c75e2ba28c5b4e 100644 --- a/Modules/selectmodule.c +++ b/Modules/selectmodule.c @@ -14,7 +14,6 @@ #include "Python.h" #include "pycore_fileutils.h" // _Py_set_inheritable() -#include "pycore_import.h" // _PyImport_GetModuleAttrString() #include "pycore_time.h" // _PyTime_FromSecondsObject() #include @@ -1996,7 +1995,7 @@ kqueue_tracking_init(PyObject *module) { // Register a callback to invalidate kqueues with open fds after fork. 
PyObject *register_at_fork = NULL, *cb = NULL, *args = NULL, *kwargs = NULL, *result = NULL; - register_at_fork = _PyImport_GetModuleAttrString("posix", + register_at_fork = PyImport_ImportModuleAttrString("posix", "register_at_fork"); if (register_at_fork == NULL) { goto finally; diff --git a/Modules/sha1module.c b/Modules/sha1module.c index 34a427a39b5cf8..d0b1e8250770d0 100644 --- a/Modules/sha1module.c +++ b/Modules/sha1module.c @@ -55,6 +55,8 @@ typedef struct { Hacl_Hash_SHA1_state_t *hash_state; } SHA1object; +#define _SHA1object_CAST(op) ((SHA1object *)(op)) + #include "clinic/sha1module.c.h" @@ -73,7 +75,7 @@ sha1_get_state(PyObject *module) static SHA1object * newSHA1object(SHA1State *st) { - SHA1object *sha = (SHA1object *)PyObject_GC_New(SHA1object, st->sha1_type); + SHA1object *sha = PyObject_GC_New(SHA1object, st->sha1_type); if (sha == NULL) { return NULL; } @@ -93,8 +95,9 @@ SHA1_traverse(PyObject *ptr, visitproc visit, void *arg) } static void -SHA1_dealloc(SHA1object *ptr) +SHA1_dealloc(PyObject *op) { + SHA1object *ptr = _SHA1object_CAST(op); Hacl_Hash_SHA1_free(ptr->hash_state); PyTypeObject *tp = Py_TYPE(ptr); PyObject_GC_UnTrack(ptr); @@ -217,36 +220,27 @@ static PyMethodDef SHA1_methods[] = { }; static PyObject * -SHA1_get_block_size(PyObject *self, void *closure) +SHA1_get_block_size(PyObject *Py_UNUSED(self), void *Py_UNUSED(closure)) { return PyLong_FromLong(SHA1_BLOCKSIZE); } static PyObject * -SHA1_get_name(PyObject *self, void *closure) +SHA1_get_name(PyObject *Py_UNUSED(self), void *Py_UNUSED(closure)) { return PyUnicode_FromStringAndSize("sha1", 4); } static PyObject * -sha1_get_digest_size(PyObject *self, void *closure) +sha1_get_digest_size(PyObject *Py_UNUSED(self), void *Py_UNUSED(closure)) { return PyLong_FromLong(SHA1_DIGESTSIZE); } static PyGetSetDef SHA1_getseters[] = { - {"block_size", - (getter)SHA1_get_block_size, NULL, - NULL, - NULL}, - {"name", - (getter)SHA1_get_name, NULL, - NULL, - NULL}, - {"digest_size", - (getter)sha1_get_digest_size, NULL, - NULL, - NULL}, + {"block_size", SHA1_get_block_size, NULL, NULL, NULL}, + {"name", SHA1_get_name, NULL, NULL, NULL}, + {"digest_size", sha1_get_digest_size, NULL, NULL, NULL}, {NULL} /* Sentinel */ }; @@ -346,7 +340,7 @@ _sha1_clear(PyObject *module) static void _sha1_free(void *module) { - _sha1_clear((PyObject *)module); + (void)_sha1_clear((PyObject *)module); } static int diff --git a/Modules/sha2module.c b/Modules/sha2module.c index 7d6a1e40243f9d..45fa120cf76758 100644 --- a/Modules/sha2module.c +++ b/Modules/sha2module.c @@ -67,6 +67,9 @@ typedef struct { Hacl_Hash_SHA2_state_t_512 *state; } SHA512object; +#define _SHA256object_CAST(op) ((SHA256object *)(op)) +#define _SHA512object_CAST(op) ((SHA512object *)(op)) + #include "clinic/sha2module.c.h" /* We shall use run-time type information in the remainder of this module to @@ -101,8 +104,7 @@ static void SHA512copy(SHA512object *src, SHA512object *dest) static SHA256object * newSHA224object(sha2_state *state) { - SHA256object *sha = (SHA256object *)PyObject_GC_New( - SHA256object, state->sha224_type); + SHA256object *sha = PyObject_GC_New(SHA256object, state->sha224_type); if (!sha) { return NULL; } @@ -115,8 +117,7 @@ newSHA224object(sha2_state *state) static SHA256object * newSHA256object(sha2_state *state) { - SHA256object *sha = (SHA256object *)PyObject_GC_New( - SHA256object, state->sha256_type); + SHA256object *sha = PyObject_GC_New(SHA256object, state->sha256_type); if (!sha) { return NULL; } @@ -129,8 +130,7 @@ 
newSHA256object(sha2_state *state) static SHA512object * newSHA384object(sha2_state *state) { - SHA512object *sha = (SHA512object *)PyObject_GC_New( - SHA512object, state->sha384_type); + SHA512object *sha = PyObject_GC_New(SHA512object, state->sha384_type); if (!sha) { return NULL; } @@ -143,8 +143,7 @@ newSHA384object(sha2_state *state) static SHA512object * newSHA512object(sha2_state *state) { - SHA512object *sha = (SHA512object *)PyObject_GC_New( - SHA512object, state->sha512_type); + SHA512object *sha = PyObject_GC_New(SHA512object, state->sha512_type); if (!sha) { return NULL; } @@ -164,8 +163,9 @@ SHA2_traverse(PyObject *ptr, visitproc visit, void *arg) } static void -SHA256_dealloc(SHA256object *ptr) +SHA256_dealloc(PyObject *op) { + SHA256object *ptr = _SHA256object_CAST(op); Hacl_Hash_SHA2_free_256(ptr->state); PyTypeObject *tp = Py_TYPE(ptr); PyObject_GC_UnTrack(ptr); @@ -174,8 +174,9 @@ SHA256_dealloc(SHA256object *ptr) } static void -SHA512_dealloc(SHA512object *ptr) +SHA512_dealloc(PyObject *op) { + SHA512object *ptr = _SHA512object_CAST(op); Hacl_Hash_SHA2_free_512(ptr->state); PyTypeObject *tp = Py_TYPE(ptr); PyObject_GC_UnTrack(ptr); @@ -442,32 +443,35 @@ static PyMethodDef SHA512_methods[] = { }; static PyObject * -SHA256_get_block_size(PyObject *self, void *closure) +SHA256_get_block_size(PyObject *Py_UNUSED(self), void *Py_UNUSED(closure)) { return PyLong_FromLong(SHA256_BLOCKSIZE); } static PyObject * -SHA512_get_block_size(PyObject *self, void *closure) +SHA512_get_block_size(PyObject *Py_UNUSED(self), void *Py_UNUSED(closure)) { return PyLong_FromLong(SHA512_BLOCKSIZE); } static PyObject * -SHA256_get_digest_size(SHA256object *self, void *closure) +SHA256_get_digest_size(PyObject *op, void *Py_UNUSED(closure)) { + SHA256object *self = _SHA256object_CAST(op); return PyLong_FromLong(self->digestsize); } static PyObject * -SHA512_get_digest_size(SHA512object *self, void *closure) +SHA512_get_digest_size(PyObject *op, void *Py_UNUSED(closure)) { + SHA512object *self = _SHA512object_CAST(op); return PyLong_FromLong(self->digestsize); } static PyObject * -SHA256_get_name(SHA256object *self, void *closure) +SHA256_get_name(PyObject *op, void *Py_UNUSED(closure)) { + SHA256object *self = _SHA256object_CAST(op); if (self->digestsize == 28) { return PyUnicode_FromStringAndSize("sha224", 6); } @@ -475,8 +479,9 @@ SHA256_get_name(SHA256object *self, void *closure) } static PyObject * -SHA512_get_name(SHA512object *self, void *closure) +SHA512_get_name(PyObject *op, void *Py_UNUSED(closure)) { + SHA512object *self = _SHA512object_CAST(op); if (self->digestsize == 64) { return PyUnicode_FromStringAndSize("sha512", 6); } @@ -484,34 +489,16 @@ SHA512_get_name(SHA512object *self, void *closure) } static PyGetSetDef SHA256_getseters[] = { - {"block_size", - (getter)SHA256_get_block_size, NULL, - NULL, - NULL}, - {"name", - (getter)SHA256_get_name, NULL, - NULL, - NULL}, - {"digest_size", - (getter)SHA256_get_digest_size, NULL, - NULL, - NULL}, + {"block_size", SHA256_get_block_size, NULL, NULL, NULL}, + {"name", SHA256_get_name, NULL, NULL, NULL}, + {"digest_size", SHA256_get_digest_size, NULL, NULL, NULL}, {NULL} /* Sentinel */ }; static PyGetSetDef SHA512_getseters[] = { - {"block_size", - (getter)SHA512_get_block_size, NULL, - NULL, - NULL}, - {"name", - (getter)SHA512_get_name, NULL, - NULL, - NULL}, - {"digest_size", - (getter)SHA512_get_digest_size, NULL, - NULL, - NULL}, + {"block_size", SHA512_get_block_size, NULL, NULL, NULL}, + {"name", SHA512_get_name, NULL, NULL, NULL}, + 
{"digest_size", SHA512_get_digest_size, NULL, NULL, NULL}, {NULL} /* Sentinel */ }; @@ -818,7 +805,7 @@ _sha2_clear(PyObject *module) static void _sha2_free(void *module) { - _sha2_clear((PyObject *)module); + (void)_sha2_clear((PyObject *)module); } /* Initialize this module. */ diff --git a/Modules/sha3module.c b/Modules/sha3module.c index b13e6a9de10114..72a11602b0e1fd 100644 --- a/Modules/sha3module.c +++ b/Modules/sha3module.c @@ -66,6 +66,8 @@ typedef struct { Hacl_Hash_SHA3_state_t *hash_state; } SHA3object; +#define _SHA3object_CAST(op) ((SHA3object *)(op)) + #include "clinic/sha3module.c.h" static SHA3object * @@ -167,8 +169,9 @@ py_sha3_new_impl(PyTypeObject *type, PyObject *data, int usedforsecurity) /* Internal methods for a hash object */ static int -SHA3_clear(SHA3object *self) +SHA3_clear(PyObject *op) { + SHA3object *self = _SHA3object_CAST(op); if (self->hash_state != NULL) { Hacl_Hash_SHA3_free(self->hash_state); self->hash_state = NULL; @@ -177,7 +180,7 @@ SHA3_clear(SHA3object *self) } static void -SHA3_dealloc(SHA3object *self) +SHA3_dealloc(PyObject *self) { PyTypeObject *tp = Py_TYPE(self); PyObject_GC_UnTrack(self); @@ -303,15 +306,16 @@ static PyMethodDef SHA3_methods[] = { static PyObject * -SHA3_get_block_size(SHA3object *self, void *closure) +SHA3_get_block_size(PyObject *op, void *Py_UNUSED(closure)) { + SHA3object *self = _SHA3object_CAST(op); uint32_t rate = Hacl_Hash_SHA3_block_len(self->hash_state); return PyLong_FromLong(rate); } static PyObject * -SHA3_get_name(SHA3object *self, void *closure) +SHA3_get_name(PyObject *self, void *Py_UNUSED(closure)) { PyTypeObject *type = Py_TYPE(self); @@ -338,9 +342,10 @@ SHA3_get_name(SHA3object *self, void *closure) static PyObject * -SHA3_get_digest_size(SHA3object *self, void *closure) +SHA3_get_digest_size(PyObject *op, void *Py_UNUSED(closure)) { // Preserving previous behavior: variable-length algorithms return 0 + SHA3object *self = _SHA3object_CAST(op); if (Hacl_Hash_SHA3_is_shake(self->hash_state)) return PyLong_FromLong(0); else @@ -349,8 +354,9 @@ SHA3_get_digest_size(SHA3object *self, void *closure) static PyObject * -SHA3_get_capacity_bits(SHA3object *self, void *closure) +SHA3_get_capacity_bits(PyObject *op, void *Py_UNUSED(closure)) { + SHA3object *self = _SHA3object_CAST(op); uint32_t rate = Hacl_Hash_SHA3_block_len(self->hash_state) * 8; assert(rate <= 1600); int capacity = 1600 - rate; @@ -359,26 +365,27 @@ SHA3_get_capacity_bits(SHA3object *self, void *closure) static PyObject * -SHA3_get_rate_bits(SHA3object *self, void *closure) +SHA3_get_rate_bits(PyObject *op, void *Py_UNUSED(closure)) { + SHA3object *self = _SHA3object_CAST(op); uint32_t rate = Hacl_Hash_SHA3_block_len(self->hash_state) * 8; return PyLong_FromLong(rate); } static PyObject * -SHA3_get_suffix(SHA3object *self, void *closure) +SHA3_get_suffix(PyObject *Py_UNUSED(self), void *Py_UNUSED(closure)) { unsigned char suffix[2] = {0x06, 0}; return PyBytes_FromStringAndSize((const char *)suffix, 1); } static PyGetSetDef SHA3_getseters[] = { - {"block_size", (getter)SHA3_get_block_size, NULL, NULL, NULL}, - {"name", (getter)SHA3_get_name, NULL, NULL, NULL}, - {"digest_size", (getter)SHA3_get_digest_size, NULL, NULL, NULL}, - {"_capacity_bits", (getter)SHA3_get_capacity_bits, NULL, NULL, NULL}, - {"_rate_bits", (getter)SHA3_get_rate_bits, NULL, NULL, NULL}, - {"_suffix", (getter)SHA3_get_suffix, NULL, NULL, NULL}, + {"block_size", SHA3_get_block_size, NULL, NULL, NULL}, + {"name", SHA3_get_name, NULL, NULL, NULL}, + {"digest_size", 
SHA3_get_digest_size, NULL, NULL, NULL}, + {"_capacity_bits", SHA3_get_capacity_bits, NULL, NULL, NULL}, + {"_rate_bits", SHA3_get_rate_bits, NULL, NULL, NULL}, + {"_suffix", SHA3_get_suffix, NULL, NULL, NULL}, {NULL} /* Sentinel */ }; @@ -438,10 +445,11 @@ SHA3_TYPE_SLOTS(sha3_512_slots, sha3_512__doc__, SHA3_methods, SHA3_getseters); SHA3_TYPE_SPEC(sha3_512_spec, "sha3_512", sha3_512_slots); static PyObject * -_SHAKE_digest(SHA3object *self, unsigned long digestlen, int hex) +_SHAKE_digest(PyObject *op, unsigned long digestlen, int hex) { unsigned char *digest = NULL; PyObject *result = NULL; + SHA3object *self = _SHA3object_CAST(op); if (digestlen >= (1 << 29)) { PyErr_SetString(PyExc_ValueError, "length is too large"); @@ -483,7 +491,7 @@ static PyObject * _sha3_shake_128_digest_impl(SHA3object *self, unsigned long length) /*[clinic end generated code: output=2313605e2f87bb8f input=418ef6a36d2e6082]*/ { - return _SHAKE_digest(self, length, 0); + return _SHAKE_digest((PyObject *)self, length, 0); } @@ -500,17 +508,17 @@ static PyObject * _sha3_shake_128_hexdigest_impl(SHA3object *self, unsigned long length) /*[clinic end generated code: output=bf8e2f1e490944a8 input=69fb29b0926ae321]*/ { - return _SHAKE_digest(self, length, 1); + return _SHAKE_digest((PyObject *)self, length, 1); } static PyObject * -SHAKE_get_digest_size(SHA3object *self, void *closure) +SHAKE_get_digest_size(PyObject *Py_UNUSED(self), void *Py_UNUSED(closure)) { return PyLong_FromLong(0); } static PyObject * -SHAKE_get_suffix(SHA3object *self, void *closure) +SHAKE_get_suffix(PyObject *Py_UNUSED(self), void *Py_UNUSED(closure)) { unsigned char suffix[2] = {0x1f, 0}; return PyBytes_FromStringAndSize((const char *)suffix, 1); @@ -518,12 +526,12 @@ SHAKE_get_suffix(SHA3object *self, void *closure) static PyGetSetDef SHAKE_getseters[] = { - {"block_size", (getter)SHA3_get_block_size, NULL, NULL, NULL}, - {"name", (getter)SHA3_get_name, NULL, NULL, NULL}, - {"digest_size", (getter)SHAKE_get_digest_size, NULL, NULL, NULL}, - {"_capacity_bits", (getter)SHA3_get_capacity_bits, NULL, NULL, NULL}, - {"_rate_bits", (getter)SHA3_get_rate_bits, NULL, NULL, NULL}, - {"_suffix", (getter)SHAKE_get_suffix, NULL, NULL, NULL}, + {"block_size", SHA3_get_block_size, NULL, NULL, NULL}, + {"name", SHA3_get_name, NULL, NULL, NULL}, + {"digest_size", SHAKE_get_digest_size, NULL, NULL, NULL}, + {"_capacity_bits", SHA3_get_capacity_bits, NULL, NULL, NULL}, + {"_rate_bits", SHA3_get_rate_bits, NULL, NULL, NULL}, + {"_suffix", SHAKE_get_suffix, NULL, NULL, NULL}, {NULL} /* Sentinel */ }; diff --git a/Modules/signalmodule.c b/Modules/signalmodule.c index 0e53a36bca55f0..b679b83bed5365 100644 --- a/Modules/signalmodule.c +++ b/Modules/signalmodule.c @@ -245,7 +245,8 @@ report_wakeup_write_error(void *data) errno = (int) (intptr_t) data; PyObject *exc = PyErr_GetRaisedException(); PyErr_SetFromErrno(PyExc_OSError); - PyErr_FormatUnraisable("Exception ignored when trying to write to the signal wakeup fd"); + PyErr_FormatUnraisable("Exception ignored while " + "trying to write to the signal wakeup fd"); PyErr_SetRaisedException(exc); errno = save_errno; return 0; @@ -262,7 +263,8 @@ report_wakeup_send_error(void* data) recognizes the error codes used by both GetLastError() and WSAGetLastError */ PyErr_SetExcFromWindowsErr(PyExc_OSError, send_errno); - PyErr_FormatUnraisable("Exception ignored when trying to send to the signal wakeup fd"); + PyErr_FormatUnraisable("Exception ignored while " + "trying to send to the signal wakeup fd"); 
PyErr_SetRaisedException(exc); return 0; } @@ -1837,7 +1839,8 @@ _PyErr_CheckSignalsTstate(PyThreadState *tstate) PyErr_Format(PyExc_OSError, "Signal %i ignored due to race condition", i); - PyErr_WriteUnraisable(Py_None); + PyErr_FormatUnraisable("Exception ignored while " + "calling signal handler"); continue; } PyObject *arglist = NULL; diff --git a/Modules/socketmodule.c b/Modules/socketmodule.c index e70aa304f2f3a3..b178eb42ac8e6a 100644 --- a/Modules/socketmodule.c +++ b/Modules/socketmodule.c @@ -110,6 +110,7 @@ Local naming conventions: #include "pycore_fileutils.h" // _Py_set_inheritable() #include "pycore_moduleobject.h" // _PyModule_GetState #include "pycore_time.h" // _PyTime_AsMilliseconds() +#include "pycore_pystate.h" // _Py_AssertHoldsTstate() #include "pycore_pyatomic_ft_wrappers.h" #ifdef _Py_MEMORY_SANITIZER @@ -601,6 +602,8 @@ get_sock_fd(PySocketSockObject *s) #endif } +#define _PySocketSockObject_CAST(op) ((PySocketSockObject *)(op)) + static inline socket_state * get_module_state(PyObject *mod) { @@ -822,8 +825,8 @@ internal_select(PySocketSockObject *s, int writing, PyTime_t interval, struct timeval tv, *tvp; #endif - /* must be called with the GIL held */ - assert(PyGILState_Check()); + /* must be called with a thread state */ + _Py_AssertHoldsTstate(); /* Error condition is for output only */ assert(!(connect && !writing)); @@ -936,8 +939,8 @@ sock_call_ex(PySocketSockObject *s, int deadline_initialized = 0; int res; - /* sock_call() must be called with the GIL held. */ - assert(PyGILState_Check()); + /* sock_call() must be called with a thread state. */ + _Py_AssertHoldsTstate(); /* outer loop to retry select() when select() is interrupted by a signal or to retry select()+sock_func() on false positive (see above) */ @@ -2928,8 +2931,10 @@ sock_accept_impl(PySocketSockObject *s, void *data) /* s._accept() -> (fd, address) */ static PyObject * -sock_accept(PySocketSockObject *s, PyObject *Py_UNUSED(ignored)) +sock_accept(PyObject *self, PyObject *Py_UNUSED(ignored)) { + PySocketSockObject *s = _PySocketSockObject_CAST(self); + sock_addr_t addrbuf; SOCKET_T newfd; socklen_t addrlen; @@ -2947,6 +2952,8 @@ sock_accept(PySocketSockObject *s, PyObject *Py_UNUSED(ignored)) ctx.addrlen = &addrlen; ctx.addrbuf = &addrbuf; + ctx.result = INVALID_SOCKET; + if (sock_call(s, 0, sock_accept_impl, &ctx) < 0) return NULL; newfd = ctx.result; @@ -3009,7 +3016,7 @@ For IP sockets, the address info is a pair (hostaddr, port)."); */ static PyObject * -sock_setblocking(PySocketSockObject *s, PyObject *arg) +sock_setblocking(PyObject *self, PyObject *arg) { long block; @@ -3017,6 +3024,7 @@ sock_setblocking(PySocketSockObject *s, PyObject *arg) if (block < 0) return NULL; + PySocketSockObject *s = _PySocketSockObject_CAST(self); s->sock_timeout = _PyTime_FromSeconds(block ? -1 : 0); if (internal_setblocking(s, block) == -1) { return NULL; @@ -3036,8 +3044,9 @@ setblocking(False) is equivalent to settimeout(0.0)."); False if it is in non-blocking mode. 
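A small sketch of the blocking/timeout semantics documented above; the refactor below only rewires the C-level getters and is intended to be behaviour-preserving:

import socket

s = socket.socket()
s.setblocking(False)      # equivalent to s.settimeout(0.0)
print(s.getblocking())    # False
print(s.gettimeout())     # 0.0
print(s.timeout)          # 0.0; the timeout property reuses the same getter
s.settimeout(None)        # back to blocking mode
print(s.getblocking())    # True
s.close()
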
*/ static PyObject * -sock_getblocking(PySocketSockObject *s, PyObject *Py_UNUSED(ignored)) +sock_getblocking(PyObject *self, PyObject *Py_UNUSED(ignored)) { + PySocketSockObject *s = _PySocketSockObject_CAST(self); if (s->sock_timeout) { Py_RETURN_TRUE; } @@ -3100,13 +3109,14 @@ socket_parse_timeout(PyTime_t *timeout, PyObject *timeout_obj) < 0 -- illegal; raises an exception */ static PyObject * -sock_settimeout(PySocketSockObject *s, PyObject *arg) +sock_settimeout(PyObject *self, PyObject *arg) { PyTime_t timeout; if (socket_parse_timeout(&timeout, arg) < 0) return NULL; + PySocketSockObject *s = _PySocketSockObject_CAST(self); s->sock_timeout = timeout; int block = timeout < 0; @@ -3148,8 +3158,9 @@ Setting a timeout of zero is the same as setblocking(0)."); /* s.gettimeout() method. Returns the timeout associated with a socket. */ static PyObject * -sock_gettimeout(PySocketSockObject *s, PyObject *Py_UNUSED(ignored)) +sock_gettimeout_impl(PyObject *self, void *Py_UNUSED(ignored)) { + PySocketSockObject *s = _PySocketSockObject_CAST(self); if (s->sock_timeout < 0) { Py_RETURN_NONE; } @@ -3159,6 +3170,18 @@ sock_gettimeout(PySocketSockObject *s, PyObject *Py_UNUSED(ignored)) } } +static inline PyObject * +sock_gettimeout_method(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + return sock_gettimeout_impl(self, NULL); +} + +static inline PyObject * +sock_gettimeout_getter(PyObject *self, void *Py_UNUSED(closure)) +{ + return sock_gettimeout_impl(self, NULL); +} + PyDoc_STRVAR(gettimeout_doc, "gettimeout() -> timeout\n\ \n\ @@ -3176,8 +3199,10 @@ operations are disabled."); */ static PyObject * -sock_setsockopt(PySocketSockObject *s, PyObject *args) +sock_setsockopt(PyObject *self, PyObject *args) { + PySocketSockObject *s = _PySocketSockObject_CAST(self); + int level; int optname; int res; @@ -3275,8 +3300,10 @@ None, optlen."); use optional built-in module 'struct' to decode the string. 
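To illustrate the comment above about decoding option buffers with the struct module (a sketch; option availability and native int size are platform-dependent):

import socket
import struct

s = socket.socket()
# With no buflen argument, an integer-valued option comes back as a plain int.
rcvbuf = s.getsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF)
# With a buflen argument, the raw bytes are returned and can be decoded manually.
raw = s.getsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 4)
(rcvbuf_again,) = struct.unpack("i", raw)    # same value as rcvbuf above
s.close()
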
*/ static PyObject * -sock_getsockopt(PySocketSockObject *s, PyObject *args) +sock_getsockopt(PyObject *self, PyObject *args) { + PySocketSockObject *s = _PySocketSockObject_CAST(self); + int level; int optname; int res; @@ -3350,8 +3377,10 @@ string of that length; otherwise it is an integer."); /* s.bind(sockaddr) method */ static PyObject * -sock_bind(PySocketSockObject *s, PyObject *addro) +sock_bind(PyObject *self, PyObject *addro) { + PySocketSockObject *s = _PySocketSockObject_CAST(self); + sock_addr_t addrbuf; int addrlen; int res; @@ -3422,8 +3451,9 @@ _socket_socket_close_impl(PySocketSockObject *s) } static PyObject * -sock_detach(PySocketSockObject *s, PyObject *Py_UNUSED(ignored)) +sock_detach(PyObject *self, PyObject *Py_UNUSED(ignored)) { + PySocketSockObject *s = _PySocketSockObject_CAST(self); SOCKET_T fd = get_sock_fd(s); set_sock_fd(s, INVALID_SOCKET); return PyLong_FromSocket_t(fd); @@ -3539,8 +3569,10 @@ internal_connect(PySocketSockObject *s, struct sockaddr *addr, int addrlen, /* s.connect(sockaddr) method */ static PyObject * -sock_connect(PySocketSockObject *s, PyObject *addro) +sock_connect(PyObject *self, PyObject *addro) { + PySocketSockObject *s = _PySocketSockObject_CAST(self); + sock_addr_t addrbuf; int addrlen; int res; @@ -3572,8 +3604,10 @@ is a pair (host, port)."); /* s.connect_ex(sockaddr) method */ static PyObject * -sock_connect_ex(PySocketSockObject *s, PyObject *addro) +sock_connect_ex(PyObject *self, PyObject *addro) { + PySocketSockObject *s = _PySocketSockObject_CAST(self); + sock_addr_t addrbuf; int addrlen; int res; @@ -3605,8 +3639,9 @@ instead of raising an exception when an error occurs."); /* s.fileno() method */ static PyObject * -sock_fileno(PySocketSockObject *s, PyObject *Py_UNUSED(ignored)) +sock_fileno(PyObject *self, PyObject *Py_UNUSED(ignored)) { + PySocketSockObject *s = _PySocketSockObject_CAST(self); return PyLong_FromSocket_t(get_sock_fd(s)); } @@ -3620,8 +3655,10 @@ Return the integer file descriptor of the socket."); /* s.getsockname() method */ static PyObject * -sock_getsockname(PySocketSockObject *s, PyObject *Py_UNUSED(ignored)) +sock_getsockname(PyObject *self, PyObject *Py_UNUSED(ignored)) { + PySocketSockObject *s = _PySocketSockObject_CAST(self); + sock_addr_t addrbuf; int res; socklen_t addrlen; @@ -3652,8 +3689,10 @@ address family. For IPv4 sockets, the address info is a pair\n\ /* s.getpeername() method */ static PyObject * -sock_getpeername(PySocketSockObject *s, PyObject *Py_UNUSED(ignored)) +sock_getpeername(PyObject *self, PyObject *Py_UNUSED(ignored)) { + PySocketSockObject *s = _PySocketSockObject_CAST(self); + sock_addr_t addrbuf; int res; socklen_t addrlen; @@ -3683,8 +3722,9 @@ info is a pair (hostaddr, port)."); /* s.listen(n) method */ static PyObject * -sock_listen(PySocketSockObject *s, PyObject *args) +sock_listen(PyObject *self, PyObject *args) { + PySocketSockObject *s = _PySocketSockObject_CAST(self); /* We try to choose a default backlog high enough to avoid connection drops * for common workloads, yet not too high to limit resource usage. 
*/ int backlog = Py_MIN(SOMAXCONN, 128); @@ -3773,8 +3813,10 @@ sock_recv_guts(PySocketSockObject *s, char* cbuf, Py_ssize_t len, int flags) /* s.recv(nbytes [,flags]) method */ static PyObject * -sock_recv(PySocketSockObject *s, PyObject *args) +sock_recv(PyObject *self, PyObject *args) { + PySocketSockObject *s = _PySocketSockObject_CAST(self); + Py_ssize_t recvlen, outlen; int flags = 0; PyObject *buf; @@ -3822,9 +3864,10 @@ the remote end is closed and all data is read, return the empty string."); /* s.recv_into(buffer, [nbytes [,flags]]) method */ static PyObject* -sock_recv_into(PySocketSockObject *s, PyObject *args, PyObject *kwds) +sock_recv_into(PyObject *self, PyObject *args, PyObject *kwds) { static char *kwlist[] = {"buffer", "nbytes", "flags", 0}; + PySocketSockObject *s = _PySocketSockObject_CAST(self); int flags = 0; Py_buffer pbuf; @@ -3958,8 +4001,10 @@ sock_recvfrom_guts(PySocketSockObject *s, char* cbuf, Py_ssize_t len, int flags, /* s.recvfrom(nbytes [,flags]) method */ static PyObject * -sock_recvfrom(PySocketSockObject *s, PyObject *args) +sock_recvfrom(PyObject *self, PyObject *args) { + PySocketSockObject *s = _PySocketSockObject_CAST(self); + PyObject *buf = NULL; PyObject *addr = NULL; PyObject *ret = NULL; @@ -4010,9 +4055,10 @@ Like recv(buffersize, flags) but also return the sender's address info."); /* s.recvfrom_into(buffer[, nbytes [,flags]]) method */ static PyObject * -sock_recvfrom_into(PySocketSockObject *s, PyObject *args, PyObject* kwds) +sock_recvfrom_into(PyObject *self, PyObject *args, PyObject* kwds) { static char *kwlist[] = {"buffer", "nbytes", "flags", 0}; + PySocketSockObject *s = _PySocketSockObject_CAST(self); int flags = 0; Py_buffer pbuf; @@ -4238,8 +4284,10 @@ makeval_recvmsg(ssize_t received, void *data) /* s.recvmsg(bufsize[, ancbufsize[, flags]]) method */ static PyObject * -sock_recvmsg(PySocketSockObject *s, PyObject *args) +sock_recvmsg(PyObject *self, PyObject *args) { + PySocketSockObject *s = _PySocketSockObject_CAST(self); + Py_ssize_t bufsize, ancbufsize = 0; int flags = 0; struct iovec iov; @@ -4305,8 +4353,10 @@ makeval_recvmsg_into(ssize_t received, void *data) /* s.recvmsg_into(buffers[, ancbufsize[, flags]]) method */ static PyObject * -sock_recvmsg_into(PySocketSockObject *s, PyObject *args) +sock_recvmsg_into(PyObject *self, PyObject *args) { + PySocketSockObject *s = _PySocketSockObject_CAST(self); + Py_ssize_t ancbufsize = 0; int flags = 0; struct iovec *iovs = NULL; @@ -4416,8 +4466,10 @@ sock_send_impl(PySocketSockObject *s, void *data) /* s.send(data [,flags]) method */ static PyObject * -sock_send(PySocketSockObject *s, PyObject *args) +sock_send(PyObject *self, PyObject *args) { + PySocketSockObject *s = _PySocketSockObject_CAST(self); + int flags = 0; Py_buffer pbuf; struct sock_send ctx; @@ -4452,8 +4504,10 @@ sent; this may be less than len(data) if the network is busy."); /* s.sendall(data [,flags]) method */ static PyObject * -sock_sendall(PySocketSockObject *s, PyObject *args) +sock_sendall(PyObject *self, PyObject *args) { + PySocketSockObject *s = _PySocketSockObject_CAST(self); + char *buf; Py_ssize_t len, n; int flags = 0; @@ -4557,8 +4611,10 @@ sock_sendto_impl(PySocketSockObject *s, void *data) /* s.sendto(data, [flags,] sockaddr) method */ static PyObject * -sock_sendto(PySocketSockObject *s, PyObject *args) +sock_sendto(PyObject *self, PyObject *args) { + PySocketSockObject *s = _PySocketSockObject_CAST(self); + Py_buffer pbuf; PyObject *addro; Py_ssize_t arglen; @@ -4701,8 +4757,10 @@ 
sock_sendmsg_impl(PySocketSockObject *s, void *data) /* s.sendmsg(buffers[, ancdata[, flags[, address]]]) method */ static PyObject * -sock_sendmsg(PySocketSockObject *s, PyObject *args) +sock_sendmsg(PyObject *self, PyObject *args) { + PySocketSockObject *s = _PySocketSockObject_CAST(self); + Py_ssize_t i, ndatabufs = 0, ncmsgs, ncmsgbufs = 0; Py_buffer *databufs = NULL; sock_addr_t addrbuf; @@ -4905,8 +4963,10 @@ data sent."); #ifdef HAVE_SOCKADDR_ALG static PyObject* -sock_sendmsg_afalg(PySocketSockObject *self, PyObject *args, PyObject *kwds) +sock_sendmsg_afalg(PyObject *s, PyObject *args, PyObject *kwds) { + PySocketSockObject *self = _PySocketSockObject_CAST(s); + PyObject *retval = NULL; Py_ssize_t i, ndatabufs = 0; @@ -5073,8 +5133,10 @@ operation socket."); /* s.shutdown(how) method */ static PyObject * -sock_shutdown(PySocketSockObject *s, PyObject *arg) +sock_shutdown(PyObject *self, PyObject *arg) { + PySocketSockObject *s = _PySocketSockObject_CAST(self); + int how; int res; @@ -5098,8 +5160,10 @@ of the socket (flag == SHUT_WR), or both ends (flag == SHUT_RDWR)."); #if defined(MS_WINDOWS) && defined(SIO_RCVALL) static PyObject* -sock_ioctl(PySocketSockObject *s, PyObject *arg) +sock_ioctl(PyObject *self, PyObject *arg) { + PySocketSockObject *s = _PySocketSockObject_CAST(self); + unsigned long cmd = SIO_RCVALL; PyObject *argO; DWORD recv; @@ -5154,8 +5218,10 @@ SIO_LOOPBACK_FAST_PATH: 'option' is a boolean value, and is disabled by default" #if defined(MS_WINDOWS) static PyObject* -sock_share(PySocketSockObject *s, PyObject *arg) +sock_share(PyObject *self, PyObject *arg) { + PySocketSockObject *s = _PySocketSockObject_CAST(self); + WSAPROTOCOL_INFOW info; DWORD processId; int result; @@ -5185,93 +5251,82 @@ socket.fromshare()."); static PyMethodDef sock_methods[] = { #if defined(HAVE_ACCEPT) || defined(HAVE_ACCEPT4) - {"_accept", (PyCFunction)sock_accept, METH_NOARGS, - accept_doc}, + {"_accept", sock_accept, METH_NOARGS, accept_doc}, #endif #ifdef HAVE_BIND - {"bind", (PyCFunction)sock_bind, METH_O, - bind_doc}, + {"bind", sock_bind, METH_O, bind_doc}, #endif _SOCKET_SOCKET_CLOSE_METHODDEF #ifdef HAVE_CONNECT - {"connect", (PyCFunction)sock_connect, METH_O, - connect_doc}, - {"connect_ex", (PyCFunction)sock_connect_ex, METH_O, - connect_ex_doc}, -#endif - {"detach", (PyCFunction)sock_detach, METH_NOARGS, - detach_doc}, - {"fileno", (PyCFunction)sock_fileno, METH_NOARGS, - fileno_doc}, + {"connect", sock_connect, METH_O, connect_doc}, + {"connect_ex", sock_connect_ex, METH_O, connect_ex_doc}, +#endif + {"detach", sock_detach, METH_NOARGS, detach_doc}, + {"fileno", sock_fileno, METH_NOARGS, fileno_doc}, #ifdef HAVE_GETPEERNAME - {"getpeername", (PyCFunction)sock_getpeername, - METH_NOARGS, getpeername_doc}, + {"getpeername", sock_getpeername, METH_NOARGS, getpeername_doc}, #endif #ifdef HAVE_GETSOCKNAME - {"getsockname", (PyCFunction)sock_getsockname, - METH_NOARGS, getsockname_doc}, + {"getsockname", sock_getsockname, METH_NOARGS, getsockname_doc}, #endif - {"getsockopt", (PyCFunction)sock_getsockopt, METH_VARARGS, - getsockopt_doc}, + {"getsockopt", sock_getsockopt, METH_VARARGS, getsockopt_doc}, #if defined(MS_WINDOWS) && defined(SIO_RCVALL) - {"ioctl", (PyCFunction)sock_ioctl, METH_VARARGS, - sock_ioctl_doc}, + {"ioctl", sock_ioctl, METH_VARARGS, sock_ioctl_doc}, #endif #if defined(MS_WINDOWS) - {"share", (PyCFunction)sock_share, METH_VARARGS, - sock_share_doc}, + {"share", sock_share, METH_VARARGS, sock_share_doc}, #endif #ifdef HAVE_LISTEN - {"listen", 
(PyCFunction)sock_listen, METH_VARARGS, - listen_doc}, + {"listen", sock_listen, METH_VARARGS, listen_doc}, #endif - {"recv", (PyCFunction)sock_recv, METH_VARARGS, - recv_doc}, - {"recv_into", _PyCFunction_CAST(sock_recv_into), METH_VARARGS | METH_KEYWORDS, - recv_into_doc}, + {"recv", sock_recv, METH_VARARGS, recv_doc}, + { + "recv_into", + _PyCFunction_CAST(sock_recv_into), + METH_VARARGS | METH_KEYWORDS, + recv_into_doc + }, #ifdef HAVE_RECVFROM - {"recvfrom", (PyCFunction)sock_recvfrom, METH_VARARGS, - recvfrom_doc}, - {"recvfrom_into", _PyCFunction_CAST(sock_recvfrom_into), METH_VARARGS | METH_KEYWORDS, - recvfrom_into_doc}, -#endif - {"send", (PyCFunction)sock_send, METH_VARARGS, - send_doc}, - {"sendall", (PyCFunction)sock_sendall, METH_VARARGS, - sendall_doc}, + {"recvfrom", sock_recvfrom, METH_VARARGS, recvfrom_doc}, + { + "recvfrom_into", + _PyCFunction_CAST(sock_recvfrom_into), + METH_VARARGS | METH_KEYWORDS, + recvfrom_into_doc + }, +#endif + {"send", sock_send, METH_VARARGS, send_doc}, + {"sendall", sock_sendall, METH_VARARGS, sendall_doc}, #ifdef HAVE_SENDTO - {"sendto", (PyCFunction)sock_sendto, METH_VARARGS, - sendto_doc}, -#endif - {"setblocking", (PyCFunction)sock_setblocking, METH_O, - setblocking_doc}, - {"getblocking", (PyCFunction)sock_getblocking, METH_NOARGS, - getblocking_doc}, - {"settimeout", (PyCFunction)sock_settimeout, METH_O, - settimeout_doc}, - {"gettimeout", (PyCFunction)sock_gettimeout, METH_NOARGS, - gettimeout_doc}, + {"sendto", sock_sendto, METH_VARARGS, sendto_doc}, +#endif + {"setblocking", sock_setblocking, METH_O, setblocking_doc}, + {"getblocking", sock_getblocking, METH_NOARGS, getblocking_doc}, + {"settimeout", sock_settimeout, METH_O, settimeout_doc}, + { + "gettimeout", sock_gettimeout_method, METH_NOARGS, + gettimeout_doc + }, #ifdef HAVE_SETSOCKOPT - {"setsockopt", (PyCFunction)sock_setsockopt, METH_VARARGS, - setsockopt_doc}, + {"setsockopt", sock_setsockopt, METH_VARARGS, setsockopt_doc}, #endif #ifdef HAVE_SHUTDOWN - {"shutdown", (PyCFunction)sock_shutdown, METH_O, - shutdown_doc}, + {"shutdown", sock_shutdown, METH_O, shutdown_doc}, #endif #ifdef CMSG_LEN - {"recvmsg", (PyCFunction)sock_recvmsg, METH_VARARGS, - recvmsg_doc}, - {"recvmsg_into", (PyCFunction)sock_recvmsg_into, METH_VARARGS, - recvmsg_into_doc,}, - {"sendmsg", (PyCFunction)sock_sendmsg, METH_VARARGS, - sendmsg_doc}, + {"recvmsg", sock_recvmsg, METH_VARARGS, recvmsg_doc}, + {"recvmsg_into", sock_recvmsg_into, METH_VARARGS, recvmsg_into_doc}, + {"sendmsg", sock_sendmsg, METH_VARARGS, sendmsg_doc}, #endif #ifdef HAVE_SOCKADDR_ALG - {"sendmsg_afalg", _PyCFunction_CAST(sock_sendmsg_afalg), METH_VARARGS | METH_KEYWORDS, - sendmsg_afalg_doc}, + { + "sendmsg_afalg", + _PyCFunction_CAST(sock_sendmsg_afalg), + METH_VARARGS | METH_KEYWORDS, + sendmsg_afalg_doc + }, #endif - {NULL, NULL} /* sentinel */ + {NULL, NULL, 0, NULL} /* sentinel */ }; /* SockObject members */ @@ -5283,7 +5338,7 @@ static PyMemberDef sock_memberlist[] = { }; static PyGetSetDef sock_getsetlist[] = { - {"timeout", (getter)sock_gettimeout, NULL, PyDoc_STR("the socket timeout")}, + {"timeout", sock_gettimeout_getter, NULL, PyDoc_STR("the socket timeout")}, {NULL} /* sentinel */ }; @@ -5291,8 +5346,10 @@ static PyGetSetDef sock_getsetlist[] = { First close the file description. */ static void -sock_finalize(PySocketSockObject *s) +sock_finalize(PyObject *self) { + PySocketSockObject *s = _PySocketSockObject_CAST(self); + SOCKET_T fd; /* Save the current exception, if any. 
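Note also that the timeout logic is now exposed through two thin wrappers: `sock_gettimeout_method` in the method table and `sock_gettimeout_getter` in the `timeout` getset entry, presumably because the two tables expect different function types (`PyCFunction` receives an ignored args object, `getter` receives a closure pointer) and the old shared function relied on casts. A sketch of the two shapes, with invented names and a dummy value:

    /* Invented example: the same value exposed both as a method and a getter. */
    static PyObject *
    spam_gettimeout_method(PyObject *self, PyObject *Py_UNUSED(ignored))
    {
        /* PyCFunction shape, referenced from PyMethodDef with METH_NOARGS. */
        return PyFloat_FromDouble(1.0);
    }

    static PyObject *
    spam_gettimeout_getter(PyObject *self, void *Py_UNUSED(closure))
    {
        /* "getter" shape, referenced from PyGetSetDef. */
        return PyFloat_FromDouble(1.0);
    }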
*/ @@ -5324,28 +5381,30 @@ sock_finalize(PySocketSockObject *s) } static int -sock_traverse(PySocketSockObject *s, visitproc visit, void *arg) +sock_traverse(PyObject *s, visitproc visit, void *arg) { Py_VISIT(Py_TYPE(s)); return 0; } static void -sock_dealloc(PySocketSockObject *s) +sock_dealloc(PyObject *s) { - if (PyObject_CallFinalizerFromDealloc((PyObject *)s) < 0) { + if (PyObject_CallFinalizerFromDealloc(s) < 0) { return; } PyTypeObject *tp = Py_TYPE(s); PyObject_GC_UnTrack(s); - tp->tp_free((PyObject *)s); + tp->tp_free(s); Py_DECREF(tp); } static PyObject * -sock_repr(PySocketSockObject *s) +sock_repr(PyObject *self) { + PySocketSockObject *s = _PySocketSockObject_CAST(self); + long sock_fd; /* On Windows, this test is needed because SOCKET_T is unsigned */ if (get_sock_fd(s) == INVALID_SOCKET) { diff --git a/Modules/syslogmodule.c b/Modules/syslogmodule.c index 14e7ca591a076b..aa1bc9da91dfb9 100644 --- a/Modules/syslogmodule.c +++ b/Modules/syslogmodule.c @@ -176,7 +176,7 @@ syslog_openlog_impl(PyObject *module, PyObject *ident, long logopt, } } if (PySys_Audit("syslog.openlog", "Oll", ident ? ident : Py_None, logopt, facility) < 0) { - Py_DECREF(ident); + Py_XDECREF(ident); return NULL; } @@ -258,7 +258,7 @@ syslog_closelog_impl(PyObject *module) // Since the sys.closelog changes the process level state of syslog library, // this operation is only allowed for the main interpreter. if (!is_main_interpreter()) { - PyErr_SetString(PyExc_RuntimeError, "sunbinterpreter can't use syslog.closelog()"); + PyErr_SetString(PyExc_RuntimeError, "subinterpreter can't use syslog.closelog()"); return NULL; } diff --git a/Modules/timemodule.c b/Modules/timemodule.c index 340011fc08b551..8d2cbff662b9a3 100644 --- a/Modules/timemodule.c +++ b/Modules/timemodule.c @@ -913,9 +913,10 @@ time_strftime(PyObject *module, PyObject *args) PyErr_NoMemory(); return NULL; } - _PyUnicodeWriter writer; - _PyUnicodeWriter_Init(&writer); - writer.overallocate = 1; + PyUnicodeWriter *writer = PyUnicodeWriter_Create(0); + if (writer == NULL) { + goto error; + } Py_ssize_t i = 0; while (i < format_size) { fmtlen = 0; @@ -933,7 +934,7 @@ time_strftime(PyObject *module, PyObject *args) if (unicode == NULL) { goto error; } - if (_PyUnicodeWriter_WriteStr(&writer, unicode) < 0) { + if (PyUnicodeWriter_WriteStr(writer, unicode) < 0) { Py_DECREF(unicode); goto error; } @@ -947,18 +948,18 @@ time_strftime(PyObject *module, PyObject *args) break; } } - if (_PyUnicodeWriter_WriteSubstring(&writer, format_arg, start, i) < 0) { + if (PyUnicodeWriter_WriteSubstring(writer, format_arg, start, i) < 0) { goto error; } } PyMem_Free(outbuf); PyMem_Free(format); - return _PyUnicodeWriter_Finish(&writer); + return PyUnicodeWriter_Finish(writer); error: PyMem_Free(outbuf); PyMem_Free(format); - _PyUnicodeWriter_Dealloc(&writer); + PyUnicodeWriter_Discard(writer); return NULL; } @@ -978,7 +979,7 @@ time_strptime(PyObject *self, PyObject *args) { PyObject *func, *result; - func = _PyImport_GetModuleAttrString("_strptime", "_strptime_time"); + func = PyImport_ImportModuleAttrString("_strptime", "_strptime_time"); if (!func) { return NULL; } diff --git a/Objects/abstract.c b/Objects/abstract.c index c92ef10aa79648..db7b9263711f68 100644 --- a/Objects/abstract.c +++ b/Objects/abstract.c @@ -583,7 +583,7 @@ PyBuffer_SizeFromFormat(const char *format) PyObject *fmt = NULL; Py_ssize_t itemsize = -1; - calcsize = _PyImport_GetModuleAttrString("struct", "calcsize"); + calcsize = PyImport_ImportModuleAttrString("struct", "calcsize"); if 
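The timemodule.c hunk above migrates from the private `_PyUnicodeWriter` struct to the recently added public `PyUnicodeWriter` API: `Create` allocates the writer, the `Write*` calls append to it, `Finish` returns the built `str` and frees the writer, and `Discard` frees it on the error path. A minimal standalone sketch of that lifecycle (the function name is invented):

    /* Sketch of the PyUnicodeWriter lifecycle; "make_greeting" is invented. */
    static PyObject *
    make_greeting(PyObject *name)
    {
        PyUnicodeWriter *writer = PyUnicodeWriter_Create(0);
        if (writer == NULL) {
            return NULL;
        }
        PyObject *prefix = PyUnicode_FromString("hello, ");
        if (prefix == NULL) {
            goto error;
        }
        int rc = PyUnicodeWriter_WriteStr(writer, prefix);
        Py_DECREF(prefix);
        if (rc < 0) {
            goto error;
        }
        if (PyUnicodeWriter_WriteStr(writer, name) < 0) {
            goto error;
        }
        return PyUnicodeWriter_Finish(writer);   /* consumes the writer */

    error:
        PyUnicodeWriter_Discard(writer);         /* frees the writer on failure */
        return NULL;
    }

The `_PyImport_GetModuleAttrString` to `PyImport_ImportModuleAttrString` renames in the neighbouring timemodule.c and abstract.c hunks are the same kind of private-to-public move; that helper is essentially the C-level form of `from module import attr`.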
(calcsize == NULL) { goto done; } diff --git a/Objects/bytearrayobject.c b/Objects/bytearrayobject.c index 871f99b6f885ba..21584332e0e443 100644 --- a/Objects/bytearrayobject.c +++ b/Objects/bytearrayobject.c @@ -2113,8 +2113,9 @@ PyDoc_STRVAR(alloc_doc, Return the number of bytes actually allocated."); static PyObject * -bytearray_alloc(PyByteArrayObject *self, PyObject *Py_UNUSED(ignored)) +bytearray_alloc(PyObject *op, PyObject *Py_UNUSED(ignored)) { + PyByteArrayObject *self = _PyByteArray_CAST(op); return PyLong_FromSsize_t(self->ob_alloc); } @@ -2313,7 +2314,7 @@ static PyBufferProcs bytearray_as_buffer = { }; static PyMethodDef bytearray_methods[] = { - {"__alloc__", (PyCFunction)bytearray_alloc, METH_NOARGS, alloc_doc}, + {"__alloc__", bytearray_alloc, METH_NOARGS, alloc_doc}, BYTEARRAY_REDUCE_METHODDEF BYTEARRAY_REDUCE_EX_METHODDEF BYTEARRAY_SIZEOF_METHODDEF @@ -2464,24 +2465,29 @@ typedef struct { PyByteArrayObject *it_seq; /* Set to NULL when iterator is exhausted */ } bytesiterobject; +#define _bytesiterobject_CAST(op) ((bytesiterobject *)(op)) + static void -bytearrayiter_dealloc(bytesiterobject *it) +bytearrayiter_dealloc(PyObject *self) { + bytesiterobject *it = _bytesiterobject_CAST(self); _PyObject_GC_UNTRACK(it); Py_XDECREF(it->it_seq); PyObject_GC_Del(it); } static int -bytearrayiter_traverse(bytesiterobject *it, visitproc visit, void *arg) +bytearrayiter_traverse(PyObject *self, visitproc visit, void *arg) { + bytesiterobject *it = _bytesiterobject_CAST(self); Py_VISIT(it->it_seq); return 0; } static PyObject * -bytearrayiter_next(bytesiterobject *it) +bytearrayiter_next(PyObject *self) { + bytesiterobject *it = _bytesiterobject_CAST(self); PyByteArrayObject *seq; assert(it != NULL); @@ -2501,8 +2507,9 @@ bytearrayiter_next(bytesiterobject *it) } static PyObject * -bytearrayiter_length_hint(bytesiterobject *it, PyObject *Py_UNUSED(ignored)) +bytearrayiter_length_hint(PyObject *self, PyObject *Py_UNUSED(ignored)) { + bytesiterobject *it = _bytesiterobject_CAST(self); Py_ssize_t len = 0; if (it->it_seq) { len = PyByteArray_GET_SIZE(it->it_seq) - it->it_index; @@ -2517,14 +2524,14 @@ PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it))."); static PyObject * -bytearrayiter_reduce(bytesiterobject *it, PyObject *Py_UNUSED(ignored)) +bytearrayiter_reduce(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *iter = _PyEval_GetBuiltin(&_Py_ID(iter)); /* _PyEval_GetBuiltin can invoke arbitrary code, * call must be before access of iterator pointers. 
* see issue #101765 */ - + bytesiterobject *it = _bytesiterobject_CAST(self); if (it->it_seq != NULL) { return Py_BuildValue("N(O)n", iter, it->it_seq, it->it_index); } else { @@ -2533,11 +2540,13 @@ bytearrayiter_reduce(bytesiterobject *it, PyObject *Py_UNUSED(ignored)) } static PyObject * -bytearrayiter_setstate(bytesiterobject *it, PyObject *state) +bytearrayiter_setstate(PyObject *self, PyObject *state) { Py_ssize_t index = PyLong_AsSsize_t(state); if (index == -1 && PyErr_Occurred()) return NULL; + + bytesiterobject *it = _bytesiterobject_CAST(self); if (it->it_seq != NULL) { if (index < 0) index = 0; @@ -2551,11 +2560,11 @@ bytearrayiter_setstate(bytesiterobject *it, PyObject *state) PyDoc_STRVAR(setstate_doc, "Set state information for unpickling."); static PyMethodDef bytearrayiter_methods[] = { - {"__length_hint__", (PyCFunction)bytearrayiter_length_hint, METH_NOARGS, + {"__length_hint__", bytearrayiter_length_hint, METH_NOARGS, length_hint_doc}, - {"__reduce__", (PyCFunction)bytearrayiter_reduce, METH_NOARGS, + {"__reduce__", bytearrayiter_reduce, METH_NOARGS, bytearray_reduce__doc__}, - {"__setstate__", (PyCFunction)bytearrayiter_setstate, METH_O, + {"__setstate__", bytearrayiter_setstate, METH_O, setstate_doc}, {NULL, NULL} /* sentinel */ }; @@ -2566,7 +2575,7 @@ PyTypeObject PyByteArrayIter_Type = { sizeof(bytesiterobject), /* tp_basicsize */ 0, /* tp_itemsize */ /* methods */ - (destructor)bytearrayiter_dealloc, /* tp_dealloc */ + bytearrayiter_dealloc, /* tp_dealloc */ 0, /* tp_vectorcall_offset */ 0, /* tp_getattr */ 0, /* tp_setattr */ @@ -2583,12 +2592,12 @@ PyTypeObject PyByteArrayIter_Type = { 0, /* tp_as_buffer */ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, /* tp_flags */ 0, /* tp_doc */ - (traverseproc)bytearrayiter_traverse, /* tp_traverse */ + bytearrayiter_traverse, /* tp_traverse */ 0, /* tp_clear */ 0, /* tp_richcompare */ 0, /* tp_weaklistoffset */ PyObject_SelfIter, /* tp_iter */ - (iternextfunc)bytearrayiter_next, /* tp_iternext */ + bytearrayiter_next, /* tp_iternext */ bytearrayiter_methods, /* tp_methods */ 0, }; diff --git a/Objects/bytesobject.c b/Objects/bytesobject.c index 90e8a9af88b4a3..b3d1c425ad18b7 100644 --- a/Objects/bytesobject.c +++ b/Objects/bytesobject.c @@ -1205,7 +1205,8 @@ PyObject *PyBytes_DecodeEscape(const char *s, unsigned char c = *first_invalid_escape; if ('4' <= c && c <= '7') { if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, - "invalid octal escape sequence '\\%.3s'", + "b\"\\%.3s\" is an invalid octal escape sequence. " + "Such sequences will not work in the future. ", first_invalid_escape) < 0) { Py_DECREF(result); @@ -1214,7 +1215,8 @@ PyObject *PyBytes_DecodeEscape(const char *s, } else { if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, - "invalid escape sequence '\\%c'", + "b\"\\%c\" is an invalid escape sequence. " + "Such sequences will not work in the future. 
", c) < 0) { Py_DECREF(result); @@ -1223,7 +1225,6 @@ PyObject *PyBytes_DecodeEscape(const char *s, } } return result; - } /* -------------------------------------------------------------------- */ /* object api */ @@ -3085,7 +3086,7 @@ PyTypeObject PyBytes_Type = { bytes_doc, /* tp_doc */ 0, /* tp_traverse */ 0, /* tp_clear */ - (richcmpfunc)bytes_richcompare, /* tp_richcompare */ + bytes_richcompare, /* tp_richcompare */ 0, /* tp_weaklistoffset */ bytes_iter, /* tp_iter */ 0, /* tp_iternext */ @@ -3245,24 +3246,29 @@ typedef struct { PyBytesObject *it_seq; /* Set to NULL when iterator is exhausted */ } striterobject; +#define _striterobject_CAST(op) ((striterobject *)(op)) + static void -striter_dealloc(striterobject *it) +striter_dealloc(PyObject *op) { + striterobject *it = _striterobject_CAST(op); _PyObject_GC_UNTRACK(it); Py_XDECREF(it->it_seq); PyObject_GC_Del(it); } static int -striter_traverse(striterobject *it, visitproc visit, void *arg) +striter_traverse(PyObject *op, visitproc visit, void *arg) { + striterobject *it = _striterobject_CAST(op); Py_VISIT(it->it_seq); return 0; } static PyObject * -striter_next(striterobject *it) +striter_next(PyObject *op) { + striterobject *it = _striterobject_CAST(op); PyBytesObject *seq; assert(it != NULL); @@ -3282,8 +3288,9 @@ striter_next(striterobject *it) } static PyObject * -striter_len(striterobject *it, PyObject *Py_UNUSED(ignored)) +striter_len(PyObject *op, PyObject *Py_UNUSED(ignored)) { + striterobject *it = _striterobject_CAST(op); Py_ssize_t len = 0; if (it->it_seq) len = PyBytes_GET_SIZE(it->it_seq) - it->it_index; @@ -3294,14 +3301,14 @@ PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it))."); static PyObject * -striter_reduce(striterobject *it, PyObject *Py_UNUSED(ignored)) +striter_reduce(PyObject *op, PyObject *Py_UNUSED(ignored)) { PyObject *iter = _PyEval_GetBuiltin(&_Py_ID(iter)); /* _PyEval_GetBuiltin can invoke arbitrary code, * call must be before access of iterator pointers. 
* see issue #101765 */ - + striterobject *it = _striterobject_CAST(op); if (it->it_seq != NULL) { return Py_BuildValue("N(O)n", iter, it->it_seq, it->it_index); } else { @@ -3312,11 +3319,12 @@ striter_reduce(striterobject *it, PyObject *Py_UNUSED(ignored)) PyDoc_STRVAR(reduce_doc, "Return state information for pickling."); static PyObject * -striter_setstate(striterobject *it, PyObject *state) +striter_setstate(PyObject *op, PyObject *state) { Py_ssize_t index = PyLong_AsSsize_t(state); if (index == -1 && PyErr_Occurred()) return NULL; + striterobject *it = _striterobject_CAST(op); if (it->it_seq != NULL) { if (index < 0) index = 0; @@ -3330,12 +3338,9 @@ striter_setstate(striterobject *it, PyObject *state) PyDoc_STRVAR(setstate_doc, "Set state information for unpickling."); static PyMethodDef striter_methods[] = { - {"__length_hint__", (PyCFunction)striter_len, METH_NOARGS, - length_hint_doc}, - {"__reduce__", (PyCFunction)striter_reduce, METH_NOARGS, - reduce_doc}, - {"__setstate__", (PyCFunction)striter_setstate, METH_O, - setstate_doc}, + {"__length_hint__", striter_len, METH_NOARGS, length_hint_doc}, + {"__reduce__", striter_reduce, METH_NOARGS, reduce_doc}, + {"__setstate__", striter_setstate, METH_O, setstate_doc}, {NULL, NULL} /* sentinel */ }; @@ -3345,7 +3350,7 @@ PyTypeObject PyBytesIter_Type = { sizeof(striterobject), /* tp_basicsize */ 0, /* tp_itemsize */ /* methods */ - (destructor)striter_dealloc, /* tp_dealloc */ + striter_dealloc, /* tp_dealloc */ 0, /* tp_vectorcall_offset */ 0, /* tp_getattr */ 0, /* tp_setattr */ @@ -3362,12 +3367,12 @@ PyTypeObject PyBytesIter_Type = { 0, /* tp_as_buffer */ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,/* tp_flags */ 0, /* tp_doc */ - (traverseproc)striter_traverse, /* tp_traverse */ + striter_traverse, /* tp_traverse */ 0, /* tp_clear */ 0, /* tp_richcompare */ 0, /* tp_weaklistoffset */ PyObject_SelfIter, /* tp_iter */ - (iternextfunc)striter_next, /* tp_iternext */ + striter_next, /* tp_iternext */ striter_methods, /* tp_methods */ 0, }; diff --git a/Objects/capsule.c b/Objects/capsule.c index 28965e0f21b7a0..16ae65905ef5ac 100644 --- a/Objects/capsule.c +++ b/Objects/capsule.c @@ -18,6 +18,8 @@ typedef struct { } PyCapsule; +#define _PyCapsule_CAST(op) ((PyCapsule *)(op)) + static int _is_legal_capsule(PyObject *op, const char *invalid_capsule) @@ -284,7 +286,7 @@ PyCapsule_Import(const char *name, int no_block) static void capsule_dealloc(PyObject *op) { - PyCapsule *capsule = (PyCapsule *)op; + PyCapsule *capsule = _PyCapsule_CAST(op); PyObject_GC_UnTrack(op); if (capsule->destructor) { capsule->destructor(op); @@ -296,7 +298,7 @@ capsule_dealloc(PyObject *op) static PyObject * capsule_repr(PyObject *o) { - PyCapsule *capsule = (PyCapsule *)o; + PyCapsule *capsule = _PyCapsule_CAST(o); const char *name; const char *quote; @@ -314,28 +316,27 @@ capsule_repr(PyObject *o) static int -capsule_traverse(PyCapsule *capsule, visitproc visit, void *arg) +capsule_traverse(PyObject *self, visitproc visit, void *arg) { // Capsule object is only tracked by the GC // if _PyCapsule_SetTraverse() is called, but // this can still be manually triggered by gc.get_referents() - + PyCapsule *capsule = _PyCapsule_CAST(self); if (capsule->traverse_func != NULL) { - return capsule->traverse_func((PyObject*)capsule, visit, arg); + return capsule->traverse_func(self, visit, arg); } - return 0; } static int -capsule_clear(PyCapsule *capsule) +capsule_clear(PyObject *self) { // Capsule object is only tracked by the GC // if _PyCapsule_SetTraverse() is called + 
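The capsule changes follow the same recipe as the socket ones: a `_PyCapsule_CAST` macro plus traverse/clear functions whose signatures now match the `traverseproc` and `inquiry` slot typedefs exactly, so the `.tp_traverse` and `.tp_clear` assignments no longer need function-pointer casts. A sketch of the two slot shapes using an invented container type:

    /* Invented "container" type; only the slot signatures matter here. */
    typedef struct {
        PyObject_HEAD
        PyObject *payload;
    } container;

    #define _container_CAST(op) ((container *)(op))

    static int
    container_traverse(PyObject *op, visitproc visit, void *arg)   /* traverseproc */
    {
        container *self = _container_CAST(op);
        Py_VISIT(self->payload);
        return 0;
    }

    static int
    container_clear(PyObject *op)                                  /* inquiry */
    {
        container *self = _container_CAST(op);
        Py_CLEAR(self->payload);
        return 0;
    }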
PyCapsule *capsule = _PyCapsule_CAST(self); assert(capsule->clear_func != NULL); - - return capsule->clear_func((PyObject*)capsule); + return capsule->clear_func(self); } @@ -358,8 +359,8 @@ PyTypeObject PyCapsule_Type = { .tp_dealloc = capsule_dealloc, .tp_repr = capsule_repr, .tp_doc = PyCapsule_Type__doc__, - .tp_traverse = (traverseproc)capsule_traverse, - .tp_clear = (inquiry)capsule_clear, + .tp_traverse = capsule_traverse, + .tp_clear = capsule_clear, }; diff --git a/Objects/classobject.c b/Objects/classobject.c index 775894ad5a7166..58e1d17977322e 100644 --- a/Objects/classobject.c +++ b/Objects/classobject.c @@ -3,6 +3,7 @@ #include "Python.h" #include "pycore_call.h" // _PyObject_VectorcallTstate() #include "pycore_ceval.h" // _PyEval_GetBuiltin() +#include "pycore_freelist.h" #include "pycore_object.h" #include "pycore_pyerrors.h" #include "pycore_pystate.h" // _PyThreadState_GET() @@ -112,9 +113,12 @@ PyMethod_New(PyObject *func, PyObject *self) PyErr_BadInternalCall(); return NULL; } - PyMethodObject *im = PyObject_GC_New(PyMethodObject, &PyMethod_Type); + PyMethodObject *im = _Py_FREELIST_POP(PyMethodObject, pymethodobjects); if (im == NULL) { - return NULL; + im = PyObject_GC_New(PyMethodObject, &PyMethod_Type); + if (im == NULL) { + return NULL; + } } im->im_weakreflist = NULL; im->im_func = Py_NewRef(func); @@ -245,7 +249,8 @@ method_dealloc(PyObject *self) PyObject_ClearWeakRefs((PyObject *)im); Py_DECREF(im->im_func); Py_XDECREF(im->im_self); - PyObject_GC_Del(im); + assert(Py_IS_TYPE(self, &PyMethod_Type)); + _Py_FREELIST_FREE(pymethodobjects, (PyObject *)im, PyObject_GC_Del); } static PyObject * diff --git a/Objects/clinic/bytearrayobject.c.h b/Objects/clinic/bytearrayobject.c.h index dee7c1e8bffd25..91cf5363e639d1 100644 --- a/Objects/clinic/bytearrayobject.c.h +++ b/Objects/clinic/bytearrayobject.c.h @@ -123,7 +123,7 @@ bytearray_find_impl(PyByteArrayObject *self, PyObject *sub, Py_ssize_t start, Py_ssize_t end); static PyObject * -bytearray_find(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs) +bytearray_find(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *sub; @@ -147,7 +147,7 @@ bytearray_find(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs) goto exit; } skip_optional: - return_value = bytearray_find_impl(self, sub, start, end); + return_value = bytearray_find_impl((PyByteArrayObject *)self, sub, start, end); exit: return return_value; @@ -172,7 +172,7 @@ bytearray_count_impl(PyByteArrayObject *self, PyObject *sub, Py_ssize_t start, Py_ssize_t end); static PyObject * -bytearray_count(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs) +bytearray_count(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *sub; @@ -196,7 +196,7 @@ bytearray_count(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs goto exit; } skip_optional: - return_value = bytearray_count_impl(self, sub, start, end); + return_value = bytearray_count_impl((PyByteArrayObject *)self, sub, start, end); exit: return return_value; @@ -215,9 +215,9 @@ static PyObject * bytearray_clear_impl(PyByteArrayObject *self); static PyObject * -bytearray_clear(PyByteArrayObject *self, PyObject *Py_UNUSED(ignored)) +bytearray_clear(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return bytearray_clear_impl(self); + return bytearray_clear_impl((PyByteArrayObject *)self); } PyDoc_STRVAR(bytearray_copy__doc__, @@ -233,9 +233,9 @@ static PyObject * 
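The classobject.c hunk above adds freelist reuse for bound method objects: `_Py_FREELIST_POP` hands back a recently freed `PyMethodObject` if one is cached (falling back to `PyObject_GC_New` when it returns NULL), and `_Py_FREELIST_FREE` in the deallocator pushes the object back onto the list, falling back to `PyObject_GC_Del` when it cannot keep it (for example when the list is full). Those macros live in the internal `pycore_freelist.h`; the sketch below is only the general freelist idea, not that header's implementation:

    /* Conceptual freelist sketch (not pycore_freelist.h). */
    #define SKETCH_MAXFREE 16

    static PyObject *sketch_freelist[SKETCH_MAXFREE];
    static int sketch_numfree = 0;

    static PyObject *
    sketch_pop(void)
    {
        if (sketch_numfree > 0) {
            return sketch_freelist[--sketch_numfree];   /* recycle a cached object */
        }
        return NULL;                                    /* caller allocates normally */
    }

    static void
    sketch_push(PyObject *op, void (*fallback_free)(void *))
    {
        if (sketch_numfree < SKETCH_MAXFREE) {
            sketch_freelist[sketch_numfree++] = op;     /* keep it for reuse */
        }
        else {
            fallback_free(op);                          /* list full: really free it */
        }
    }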
bytearray_copy_impl(PyByteArrayObject *self); static PyObject * -bytearray_copy(PyByteArrayObject *self, PyObject *Py_UNUSED(ignored)) +bytearray_copy(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return bytearray_copy_impl(self); + return bytearray_copy_impl((PyByteArrayObject *)self); } PyDoc_STRVAR(bytearray_index__doc__, @@ -259,7 +259,7 @@ bytearray_index_impl(PyByteArrayObject *self, PyObject *sub, Py_ssize_t start, Py_ssize_t end); static PyObject * -bytearray_index(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs) +bytearray_index(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *sub; @@ -283,7 +283,7 @@ bytearray_index(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs goto exit; } skip_optional: - return_value = bytearray_index_impl(self, sub, start, end); + return_value = bytearray_index_impl((PyByteArrayObject *)self, sub, start, end); exit: return return_value; @@ -310,7 +310,7 @@ bytearray_rfind_impl(PyByteArrayObject *self, PyObject *sub, Py_ssize_t start, Py_ssize_t end); static PyObject * -bytearray_rfind(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs) +bytearray_rfind(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *sub; @@ -334,7 +334,7 @@ bytearray_rfind(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs goto exit; } skip_optional: - return_value = bytearray_rfind_impl(self, sub, start, end); + return_value = bytearray_rfind_impl((PyByteArrayObject *)self, sub, start, end); exit: return return_value; @@ -361,7 +361,7 @@ bytearray_rindex_impl(PyByteArrayObject *self, PyObject *sub, Py_ssize_t start, Py_ssize_t end); static PyObject * -bytearray_rindex(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs) +bytearray_rindex(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *sub; @@ -385,7 +385,7 @@ bytearray_rindex(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t narg goto exit; } skip_optional: - return_value = bytearray_rindex_impl(self, sub, start, end); + return_value = bytearray_rindex_impl((PyByteArrayObject *)self, sub, start, end); exit: return return_value; @@ -412,7 +412,7 @@ bytearray_startswith_impl(PyByteArrayObject *self, PyObject *subobj, Py_ssize_t start, Py_ssize_t end); static PyObject * -bytearray_startswith(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs) +bytearray_startswith(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *subobj; @@ -436,7 +436,7 @@ bytearray_startswith(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t goto exit; } skip_optional: - return_value = bytearray_startswith_impl(self, subobj, start, end); + return_value = bytearray_startswith_impl((PyByteArrayObject *)self, subobj, start, end); exit: return return_value; @@ -463,7 +463,7 @@ bytearray_endswith_impl(PyByteArrayObject *self, PyObject *subobj, Py_ssize_t start, Py_ssize_t end); static PyObject * -bytearray_endswith(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs) +bytearray_endswith(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *subobj; @@ -487,7 +487,7 @@ bytearray_endswith(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t na goto exit; } skip_optional: - return_value = bytearray_endswith_impl(self, subobj, start, end); + return_value = bytearray_endswith_impl((PyByteArrayObject *)self, 
subobj, start, end); exit: return return_value; @@ -510,7 +510,7 @@ static PyObject * bytearray_removeprefix_impl(PyByteArrayObject *self, Py_buffer *prefix); static PyObject * -bytearray_removeprefix(PyByteArrayObject *self, PyObject *arg) +bytearray_removeprefix(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; Py_buffer prefix = {NULL, NULL}; @@ -518,7 +518,7 @@ bytearray_removeprefix(PyByteArrayObject *self, PyObject *arg) if (PyObject_GetBuffer(arg, &prefix, PyBUF_SIMPLE) != 0) { goto exit; } - return_value = bytearray_removeprefix_impl(self, &prefix); + return_value = bytearray_removeprefix_impl((PyByteArrayObject *)self, &prefix); exit: /* Cleanup for prefix */ @@ -546,7 +546,7 @@ static PyObject * bytearray_removesuffix_impl(PyByteArrayObject *self, Py_buffer *suffix); static PyObject * -bytearray_removesuffix(PyByteArrayObject *self, PyObject *arg) +bytearray_removesuffix(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; Py_buffer suffix = {NULL, NULL}; @@ -554,7 +554,7 @@ bytearray_removesuffix(PyByteArrayObject *self, PyObject *arg) if (PyObject_GetBuffer(arg, &suffix, PyBUF_SIMPLE) != 0) { goto exit; } - return_value = bytearray_removesuffix_impl(self, &suffix); + return_value = bytearray_removesuffix_impl((PyByteArrayObject *)self, &suffix); exit: /* Cleanup for suffix */ @@ -585,7 +585,7 @@ bytearray_translate_impl(PyByteArrayObject *self, PyObject *table, PyObject *deletechars); static PyObject * -bytearray_translate(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +bytearray_translate(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -629,7 +629,7 @@ bytearray_translate(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t n } deletechars = args[1]; skip_optional_pos: - return_value = bytearray_translate_impl(self, table, deletechars); + return_value = bytearray_translate_impl((PyByteArrayObject *)self, table, deletechars); exit: return return_value; @@ -704,7 +704,7 @@ bytearray_replace_impl(PyByteArrayObject *self, Py_buffer *old, Py_buffer *new, Py_ssize_t count); static PyObject * -bytearray_replace(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs) +bytearray_replace(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; Py_buffer old = {NULL, NULL}; @@ -736,7 +736,7 @@ bytearray_replace(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nar count = ival; } skip_optional: - return_value = bytearray_replace_impl(self, &old, &new, count); + return_value = bytearray_replace_impl((PyByteArrayObject *)self, &old, &new, count); exit: /* Cleanup for old */ @@ -773,7 +773,7 @@ bytearray_split_impl(PyByteArrayObject *self, PyObject *sep, Py_ssize_t maxsplit); static PyObject * -bytearray_split(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +bytearray_split(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -833,7 +833,7 @@ bytearray_split(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs maxsplit = ival; } skip_optional_pos: - return_value = bytearray_split_impl(self, sep, maxsplit); + return_value = bytearray_split_impl((PyByteArrayObject *)self, sep, maxsplit); exit: return return_value; @@ -896,7 +896,7 @@ bytearray_rsplit_impl(PyByteArrayObject *self, 
PyObject *sep, Py_ssize_t maxsplit); static PyObject * -bytearray_rsplit(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +bytearray_rsplit(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -956,7 +956,7 @@ bytearray_rsplit(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t narg maxsplit = ival; } skip_optional_pos: - return_value = bytearray_rsplit_impl(self, sep, maxsplit); + return_value = bytearray_rsplit_impl((PyByteArrayObject *)self, sep, maxsplit); exit: return return_value; @@ -975,9 +975,9 @@ static PyObject * bytearray_reverse_impl(PyByteArrayObject *self); static PyObject * -bytearray_reverse(PyByteArrayObject *self, PyObject *Py_UNUSED(ignored)) +bytearray_reverse(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return bytearray_reverse_impl(self); + return bytearray_reverse_impl((PyByteArrayObject *)self); } PyDoc_STRVAR(bytearray_insert__doc__, @@ -998,7 +998,7 @@ static PyObject * bytearray_insert_impl(PyByteArrayObject *self, Py_ssize_t index, int item); static PyObject * -bytearray_insert(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs) +bytearray_insert(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; Py_ssize_t index; @@ -1022,7 +1022,7 @@ bytearray_insert(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t narg if (!_getbytevalue(args[1], &item)) { goto exit; } - return_value = bytearray_insert_impl(self, index, item); + return_value = bytearray_insert_impl((PyByteArrayObject *)self, index, item); exit: return return_value; @@ -1044,7 +1044,7 @@ static PyObject * bytearray_append_impl(PyByteArrayObject *self, int item); static PyObject * -bytearray_append(PyByteArrayObject *self, PyObject *arg) +bytearray_append(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; int item; @@ -1052,7 +1052,7 @@ bytearray_append(PyByteArrayObject *self, PyObject *arg) if (!_getbytevalue(arg, &item)) { goto exit; } - return_value = bytearray_append_impl(self, item); + return_value = bytearray_append_impl((PyByteArrayObject *)self, item); exit: return return_value; @@ -1089,7 +1089,7 @@ static PyObject * bytearray_pop_impl(PyByteArrayObject *self, Py_ssize_t index); static PyObject * -bytearray_pop(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs) +bytearray_pop(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; Py_ssize_t index = -1; @@ -1113,7 +1113,7 @@ bytearray_pop(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs) index = ival; } skip_optional: - return_value = bytearray_pop_impl(self, index); + return_value = bytearray_pop_impl((PyByteArrayObject *)self, index); exit: return return_value; @@ -1135,7 +1135,7 @@ static PyObject * bytearray_remove_impl(PyByteArrayObject *self, int value); static PyObject * -bytearray_remove(PyByteArrayObject *self, PyObject *arg) +bytearray_remove(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; int value; @@ -1143,7 +1143,7 @@ bytearray_remove(PyByteArrayObject *self, PyObject *arg) if (!_getbytevalue(arg, &value)) { goto exit; } - return_value = bytearray_remove_impl(self, value); + return_value = bytearray_remove_impl((PyByteArrayObject *)self, value); exit: return return_value; @@ -1164,7 +1164,7 @@ static PyObject * bytearray_strip_impl(PyByteArrayObject *self, PyObject *bytes); static PyObject * 
-bytearray_strip(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs) +bytearray_strip(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *bytes = Py_None; @@ -1177,7 +1177,7 @@ bytearray_strip(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs } bytes = args[0]; skip_optional: - return_value = bytearray_strip_impl(self, bytes); + return_value = bytearray_strip_impl((PyByteArrayObject *)self, bytes); exit: return return_value; @@ -1198,7 +1198,7 @@ static PyObject * bytearray_lstrip_impl(PyByteArrayObject *self, PyObject *bytes); static PyObject * -bytearray_lstrip(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs) +bytearray_lstrip(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *bytes = Py_None; @@ -1211,7 +1211,7 @@ bytearray_lstrip(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t narg } bytes = args[0]; skip_optional: - return_value = bytearray_lstrip_impl(self, bytes); + return_value = bytearray_lstrip_impl((PyByteArrayObject *)self, bytes); exit: return return_value; @@ -1232,7 +1232,7 @@ static PyObject * bytearray_rstrip_impl(PyByteArrayObject *self, PyObject *bytes); static PyObject * -bytearray_rstrip(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs) +bytearray_rstrip(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *bytes = Py_None; @@ -1245,7 +1245,7 @@ bytearray_rstrip(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t narg } bytes = args[0]; skip_optional: - return_value = bytearray_rstrip_impl(self, bytes); + return_value = bytearray_rstrip_impl((PyByteArrayObject *)self, bytes); exit: return return_value; @@ -1274,7 +1274,7 @@ bytearray_decode_impl(PyByteArrayObject *self, const char *encoding, const char *errors); static PyObject * -bytearray_decode(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +bytearray_decode(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -1347,7 +1347,7 @@ bytearray_decode(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t narg goto exit; } skip_optional_pos: - return_value = bytearray_decode_impl(self, encoding, errors); + return_value = bytearray_decode_impl((PyByteArrayObject *)self, encoding, errors); exit: return return_value; @@ -1382,7 +1382,7 @@ static PyObject * bytearray_splitlines_impl(PyByteArrayObject *self, int keepends); static PyObject * -bytearray_splitlines(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +bytearray_splitlines(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -1427,7 +1427,7 @@ bytearray_splitlines(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t goto exit; } skip_optional_pos: - return_value = bytearray_splitlines_impl(self, keepends); + return_value = bytearray_splitlines_impl((PyByteArrayObject *)self, keepends); exit: return return_value; @@ -1495,7 +1495,7 @@ static PyObject * bytearray_hex_impl(PyByteArrayObject *self, PyObject *sep, int bytes_per_sep); static PyObject * -bytearray_hex(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +bytearray_hex(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject 
*kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -1547,7 +1547,7 @@ bytearray_hex(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs, goto exit; } skip_optional_pos: - return_value = bytearray_hex_impl(self, sep, bytes_per_sep); + return_value = bytearray_hex_impl((PyByteArrayObject *)self, sep, bytes_per_sep); exit: return return_value; @@ -1566,9 +1566,9 @@ static PyObject * bytearray_reduce_impl(PyByteArrayObject *self); static PyObject * -bytearray_reduce(PyByteArrayObject *self, PyObject *Py_UNUSED(ignored)) +bytearray_reduce(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return bytearray_reduce_impl(self); + return bytearray_reduce_impl((PyByteArrayObject *)self); } PyDoc_STRVAR(bytearray_reduce_ex__doc__, @@ -1584,7 +1584,7 @@ static PyObject * bytearray_reduce_ex_impl(PyByteArrayObject *self, int proto); static PyObject * -bytearray_reduce_ex(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs) +bytearray_reduce_ex(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; int proto = 0; @@ -1600,7 +1600,7 @@ bytearray_reduce_ex(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t n goto exit; } skip_optional: - return_value = bytearray_reduce_ex_impl(self, proto); + return_value = bytearray_reduce_ex_impl((PyByteArrayObject *)self, proto); exit: return return_value; @@ -1619,8 +1619,8 @@ static PyObject * bytearray_sizeof_impl(PyByteArrayObject *self); static PyObject * -bytearray_sizeof(PyByteArrayObject *self, PyObject *Py_UNUSED(ignored)) +bytearray_sizeof(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return bytearray_sizeof_impl(self); + return bytearray_sizeof_impl((PyByteArrayObject *)self); } -/*[clinic end generated code: output=4488e38e7ffcc6ec input=a9049054013a1b77]*/ +/*[clinic end generated code: output=bc8bec8514102bf3 input=a9049054013a1b77]*/ diff --git a/Objects/clinic/bytesobject.c.h b/Objects/clinic/bytesobject.c.h index d2c6cc88770999..9aef736428ad0e 100644 --- a/Objects/clinic/bytesobject.c.h +++ b/Objects/clinic/bytesobject.c.h @@ -22,9 +22,9 @@ static PyObject * bytes___bytes___impl(PyBytesObject *self); static PyObject * -bytes___bytes__(PyBytesObject *self, PyObject *Py_UNUSED(ignored)) +bytes___bytes__(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return bytes___bytes___impl(self); + return bytes___bytes___impl((PyBytesObject *)self); } PyDoc_STRVAR(bytes_split__doc__, @@ -48,7 +48,7 @@ static PyObject * bytes_split_impl(PyBytesObject *self, PyObject *sep, Py_ssize_t maxsplit); static PyObject * -bytes_split(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +bytes_split(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -108,7 +108,7 @@ bytes_split(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs, PyObje maxsplit = ival; } skip_optional_pos: - return_value = bytes_split_impl(self, sep, maxsplit); + return_value = bytes_split_impl((PyBytesObject *)self, sep, maxsplit); exit: return return_value; @@ -134,7 +134,7 @@ static PyObject * bytes_partition_impl(PyBytesObject *self, Py_buffer *sep); static PyObject * -bytes_partition(PyBytesObject *self, PyObject *arg) +bytes_partition(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; Py_buffer sep = {NULL, NULL}; @@ -142,7 +142,7 @@ bytes_partition(PyBytesObject *self, PyObject *arg) if 
(PyObject_GetBuffer(arg, &sep, PyBUF_SIMPLE) != 0) { goto exit; } - return_value = bytes_partition_impl(self, &sep); + return_value = bytes_partition_impl((PyBytesObject *)self, &sep); exit: /* Cleanup for sep */ @@ -173,7 +173,7 @@ static PyObject * bytes_rpartition_impl(PyBytesObject *self, Py_buffer *sep); static PyObject * -bytes_rpartition(PyBytesObject *self, PyObject *arg) +bytes_rpartition(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; Py_buffer sep = {NULL, NULL}; @@ -181,7 +181,7 @@ bytes_rpartition(PyBytesObject *self, PyObject *arg) if (PyObject_GetBuffer(arg, &sep, PyBUF_SIMPLE) != 0) { goto exit; } - return_value = bytes_rpartition_impl(self, &sep); + return_value = bytes_rpartition_impl((PyBytesObject *)self, &sep); exit: /* Cleanup for sep */ @@ -215,7 +215,7 @@ static PyObject * bytes_rsplit_impl(PyBytesObject *self, PyObject *sep, Py_ssize_t maxsplit); static PyObject * -bytes_rsplit(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +bytes_rsplit(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -275,7 +275,7 @@ bytes_rsplit(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs, PyObj maxsplit = ival; } skip_optional_pos: - return_value = bytes_rsplit_impl(self, sep, maxsplit); + return_value = bytes_rsplit_impl((PyBytesObject *)self, sep, maxsplit); exit: return return_value; @@ -317,7 +317,7 @@ bytes_find_impl(PyBytesObject *self, PyObject *sub, Py_ssize_t start, Py_ssize_t end); static PyObject * -bytes_find(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs) +bytes_find(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *sub; @@ -341,7 +341,7 @@ bytes_find(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs) goto exit; } skip_optional: - return_value = bytes_find_impl(self, sub, start, end); + return_value = bytes_find_impl((PyBytesObject *)self, sub, start, end); exit: return return_value; @@ -368,7 +368,7 @@ bytes_index_impl(PyBytesObject *self, PyObject *sub, Py_ssize_t start, Py_ssize_t end); static PyObject * -bytes_index(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs) +bytes_index(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *sub; @@ -392,7 +392,7 @@ bytes_index(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs) goto exit; } skip_optional: - return_value = bytes_index_impl(self, sub, start, end); + return_value = bytes_index_impl((PyBytesObject *)self, sub, start, end); exit: return return_value; @@ -419,7 +419,7 @@ bytes_rfind_impl(PyBytesObject *self, PyObject *sub, Py_ssize_t start, Py_ssize_t end); static PyObject * -bytes_rfind(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs) +bytes_rfind(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *sub; @@ -443,7 +443,7 @@ bytes_rfind(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs) goto exit; } skip_optional: - return_value = bytes_rfind_impl(self, sub, start, end); + return_value = bytes_rfind_impl((PyBytesObject *)self, sub, start, end); exit: return return_value; @@ -470,7 +470,7 @@ bytes_rindex_impl(PyBytesObject *self, PyObject *sub, Py_ssize_t start, Py_ssize_t end); static PyObject * -bytes_rindex(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs) +bytes_rindex(PyObject *self, PyObject *const *args, 
Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *sub; @@ -494,7 +494,7 @@ bytes_rindex(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs) goto exit; } skip_optional: - return_value = bytes_rindex_impl(self, sub, start, end); + return_value = bytes_rindex_impl((PyBytesObject *)self, sub, start, end); exit: return return_value; @@ -515,7 +515,7 @@ static PyObject * bytes_strip_impl(PyBytesObject *self, PyObject *bytes); static PyObject * -bytes_strip(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs) +bytes_strip(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *bytes = Py_None; @@ -528,7 +528,7 @@ bytes_strip(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs) } bytes = args[0]; skip_optional: - return_value = bytes_strip_impl(self, bytes); + return_value = bytes_strip_impl((PyBytesObject *)self, bytes); exit: return return_value; @@ -549,7 +549,7 @@ static PyObject * bytes_lstrip_impl(PyBytesObject *self, PyObject *bytes); static PyObject * -bytes_lstrip(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs) +bytes_lstrip(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *bytes = Py_None; @@ -562,7 +562,7 @@ bytes_lstrip(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs) } bytes = args[0]; skip_optional: - return_value = bytes_lstrip_impl(self, bytes); + return_value = bytes_lstrip_impl((PyBytesObject *)self, bytes); exit: return return_value; @@ -583,7 +583,7 @@ static PyObject * bytes_rstrip_impl(PyBytesObject *self, PyObject *bytes); static PyObject * -bytes_rstrip(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs) +bytes_rstrip(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *bytes = Py_None; @@ -596,7 +596,7 @@ bytes_rstrip(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs) } bytes = args[0]; skip_optional: - return_value = bytes_rstrip_impl(self, bytes); + return_value = bytes_rstrip_impl((PyBytesObject *)self, bytes); exit: return return_value; @@ -621,7 +621,7 @@ bytes_count_impl(PyBytesObject *self, PyObject *sub, Py_ssize_t start, Py_ssize_t end); static PyObject * -bytes_count(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs) +bytes_count(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *sub; @@ -645,7 +645,7 @@ bytes_count(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs) goto exit; } skip_optional: - return_value = bytes_count_impl(self, sub, start, end); + return_value = bytes_count_impl((PyBytesObject *)self, sub, start, end); exit: return return_value; @@ -671,7 +671,7 @@ bytes_translate_impl(PyBytesObject *self, PyObject *table, PyObject *deletechars); static PyObject * -bytes_translate(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +bytes_translate(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -715,7 +715,7 @@ bytes_translate(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs, Py } deletechars = args[1]; skip_optional_pos: - return_value = bytes_translate_impl(self, table, deletechars); + return_value = bytes_translate_impl((PyBytesObject *)self, table, deletechars); exit: return return_value; @@ -790,7 +790,7 @@ bytes_replace_impl(PyBytesObject *self, Py_buffer *old, Py_buffer *new, Py_ssize_t count); 
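These clinic-generated headers show the same conversion from the generated side: the argument-parsing wrapper (for example `bytes_replace`) now has the generic `PyObject *self` signature the method table expects, and the cast to the concrete type happens exactly once, at the call into the hand-written `*_impl` function, which keeps its typed signature. A hand-simplified sketch of that wrapper/impl split, with an invented `WidgetObject`:

    /* Invented example of the wrapper/_impl split used by Argument Clinic. */
    typedef struct {
        PyObject_HEAD
        int state;
    } WidgetObject;

    static PyObject *
    widget_reset_impl(WidgetObject *self)          /* typed implementation, unchanged */
    {
        self->state = 0;
        Py_RETURN_NONE;
    }

    static PyObject *
    widget_reset(PyObject *self, PyObject *Py_UNUSED(ignored))
    {
        /* generated-style wrapper: generic signature, single cast at the call */
        return widget_reset_impl((WidgetObject *)self);
    }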
static PyObject * -bytes_replace(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs) +bytes_replace(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; Py_buffer old = {NULL, NULL}; @@ -822,7 +822,7 @@ bytes_replace(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs) count = ival; } skip_optional: - return_value = bytes_replace_impl(self, &old, &new, count); + return_value = bytes_replace_impl((PyBytesObject *)self, &old, &new, count); exit: /* Cleanup for old */ @@ -853,7 +853,7 @@ static PyObject * bytes_removeprefix_impl(PyBytesObject *self, Py_buffer *prefix); static PyObject * -bytes_removeprefix(PyBytesObject *self, PyObject *arg) +bytes_removeprefix(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; Py_buffer prefix = {NULL, NULL}; @@ -861,7 +861,7 @@ bytes_removeprefix(PyBytesObject *self, PyObject *arg) if (PyObject_GetBuffer(arg, &prefix, PyBUF_SIMPLE) != 0) { goto exit; } - return_value = bytes_removeprefix_impl(self, &prefix); + return_value = bytes_removeprefix_impl((PyBytesObject *)self, &prefix); exit: /* Cleanup for prefix */ @@ -889,7 +889,7 @@ static PyObject * bytes_removesuffix_impl(PyBytesObject *self, Py_buffer *suffix); static PyObject * -bytes_removesuffix(PyBytesObject *self, PyObject *arg) +bytes_removesuffix(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; Py_buffer suffix = {NULL, NULL}; @@ -897,7 +897,7 @@ bytes_removesuffix(PyBytesObject *self, PyObject *arg) if (PyObject_GetBuffer(arg, &suffix, PyBUF_SIMPLE) != 0) { goto exit; } - return_value = bytes_removesuffix_impl(self, &suffix); + return_value = bytes_removesuffix_impl((PyBytesObject *)self, &suffix); exit: /* Cleanup for suffix */ @@ -929,7 +929,7 @@ bytes_startswith_impl(PyBytesObject *self, PyObject *subobj, Py_ssize_t start, Py_ssize_t end); static PyObject * -bytes_startswith(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs) +bytes_startswith(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *subobj; @@ -953,7 +953,7 @@ bytes_startswith(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs) goto exit; } skip_optional: - return_value = bytes_startswith_impl(self, subobj, start, end); + return_value = bytes_startswith_impl((PyBytesObject *)self, subobj, start, end); exit: return return_value; @@ -980,7 +980,7 @@ bytes_endswith_impl(PyBytesObject *self, PyObject *subobj, Py_ssize_t start, Py_ssize_t end); static PyObject * -bytes_endswith(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs) +bytes_endswith(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *subobj; @@ -1004,7 +1004,7 @@ bytes_endswith(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs) goto exit; } skip_optional: - return_value = bytes_endswith_impl(self, subobj, start, end); + return_value = bytes_endswith_impl((PyBytesObject *)self, subobj, start, end); exit: return return_value; @@ -1033,7 +1033,7 @@ bytes_decode_impl(PyBytesObject *self, const char *encoding, const char *errors); static PyObject * -bytes_decode(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +bytes_decode(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -1106,7 +1106,7 @@ bytes_decode(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs, PyObj goto exit; } skip_optional_pos: - 
return_value = bytes_decode_impl(self, encoding, errors); + return_value = bytes_decode_impl((PyBytesObject *)self, encoding, errors); exit: return return_value; @@ -1128,7 +1128,7 @@ static PyObject * bytes_splitlines_impl(PyBytesObject *self, int keepends); static PyObject * -bytes_splitlines(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +bytes_splitlines(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -1173,7 +1173,7 @@ bytes_splitlines(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs, P goto exit; } skip_optional_pos: - return_value = bytes_splitlines_impl(self, keepends); + return_value = bytes_splitlines_impl((PyBytesObject *)self, keepends); exit: return return_value; @@ -1241,7 +1241,7 @@ static PyObject * bytes_hex_impl(PyBytesObject *self, PyObject *sep, int bytes_per_sep); static PyObject * -bytes_hex(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +bytes_hex(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -1293,7 +1293,7 @@ bytes_hex(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject goto exit; } skip_optional_pos: - return_value = bytes_hex_impl(self, sep, bytes_per_sep); + return_value = bytes_hex_impl((PyBytesObject *)self, sep, bytes_per_sep); exit: return return_value; @@ -1391,4 +1391,4 @@ bytes_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) exit: return return_value; } -/*[clinic end generated code: output=fb7939a1983e463a input=a9049054013a1b77]*/ +/*[clinic end generated code: output=96fe2d6ef9ac8f6a input=a9049054013a1b77]*/ diff --git a/Objects/clinic/classobject.c.h b/Objects/clinic/classobject.c.h index 3e149c97324a6a..5934f1c2a41669 100644 --- a/Objects/clinic/classobject.c.h +++ b/Objects/clinic/classobject.c.h @@ -16,9 +16,9 @@ static PyObject * method___reduce___impl(PyMethodObject *self); static PyObject * -method___reduce__(PyMethodObject *self, PyObject *Py_UNUSED(ignored)) +method___reduce__(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return method___reduce___impl(self); + return method___reduce___impl((PyMethodObject *)self); } PyDoc_STRVAR(method_new__doc__, @@ -82,4 +82,4 @@ instancemethod_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) exit: return return_value; } -/*[clinic end generated code: output=5a5e3f2d0726f189 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=ab546abf90aac94e input=a9049054013a1b77]*/ diff --git a/Objects/clinic/codeobject.c.h b/Objects/clinic/codeobject.c.h index 45738f767df50f..2184742cc0d99d 100644 --- a/Objects/clinic/codeobject.c.h +++ b/Objects/clinic/codeobject.c.h @@ -174,7 +174,7 @@ code_replace_impl(PyCodeObject *self, int co_argcount, PyObject *co_exceptiontable); static PyObject * -code_replace(PyCodeObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +code_replace(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -204,24 +204,24 @@ code_replace(PyCodeObject *self, PyObject *const *args, Py_ssize_t nargs, PyObje #undef KWTUPLE PyObject *argsbuf[18]; Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 0; - int co_argcount = self->co_argcount; - int co_posonlyargcount = self->co_posonlyargcount; - int co_kwonlyargcount = self->co_kwonlyargcount; - int co_nlocals = self->co_nlocals; - int co_stacksize = self->co_stacksize; - int co_flags = self->co_flags; - int co_firstlineno = self->co_firstlineno; + int co_argcount = ((PyCodeObject *)self)->co_argcount; + int co_posonlyargcount = ((PyCodeObject *)self)->co_posonlyargcount; + int co_kwonlyargcount = ((PyCodeObject *)self)->co_kwonlyargcount; + int co_nlocals = ((PyCodeObject *)self)->co_nlocals; + int co_stacksize = ((PyCodeObject *)self)->co_stacksize; + int co_flags = ((PyCodeObject *)self)->co_flags; + int co_firstlineno = ((PyCodeObject *)self)->co_firstlineno; PyObject *co_code = NULL; - PyObject *co_consts = self->co_consts; - PyObject *co_names = self->co_names; + PyObject *co_consts = ((PyCodeObject *)self)->co_consts; + PyObject *co_names = ((PyCodeObject *)self)->co_names; PyObject *co_varnames = NULL; PyObject *co_freevars = NULL; PyObject *co_cellvars = NULL; - PyObject *co_filename = self->co_filename; - PyObject *co_name = self->co_name; - PyObject *co_qualname = self->co_qualname; - PyObject *co_linetable = self->co_linetable; - PyObject *co_exceptiontable = self->co_exceptiontable; + PyObject *co_filename = ((PyCodeObject *)self)->co_filename; + PyObject *co_name = ((PyCodeObject *)self)->co_name; + PyObject *co_qualname = ((PyCodeObject *)self)->co_qualname; + PyObject *co_linetable = ((PyCodeObject *)self)->co_linetable; + PyObject *co_exceptiontable = ((PyCodeObject *)self)->co_exceptiontable; args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, /*minpos*/ 0, /*maxpos*/ 0, /*minkw*/ 0, /*varpos*/ 0, argsbuf); @@ -400,7 +400,7 @@ code_replace(PyCodeObject *self, PyObject *const *args, Py_ssize_t nargs, PyObje } co_exceptiontable = args[17]; skip_optional_kwonly: - return_value = code_replace_impl(self, co_argcount, co_posonlyargcount, co_kwonlyargcount, co_nlocals, co_stacksize, co_flags, co_firstlineno, co_code, co_consts, co_names, co_varnames, co_freevars, co_cellvars, co_filename, co_name, co_qualname, co_linetable, co_exceptiontable); + return_value = code_replace_impl((PyCodeObject *)self, co_argcount, co_posonlyargcount, co_kwonlyargcount, co_nlocals, co_stacksize, co_flags, co_firstlineno, co_code, co_consts, co_names, co_varnames, co_freevars, co_cellvars, co_filename, co_name, co_qualname, co_linetable, co_exceptiontable); exit: return return_value; @@ -421,7 +421,7 @@ static PyObject * code__varname_from_oparg_impl(PyCodeObject *self, int oparg); static PyObject * -code__varname_from_oparg(PyCodeObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +code__varname_from_oparg(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -461,9 +461,9 @@ code__varname_from_oparg(PyCodeObject *self, PyObject *const *args, Py_ssize_t n if (oparg == -1 && PyErr_Occurred()) { goto exit; } - return_value = code__varname_from_oparg_impl(self, oparg); + return_value = code__varname_from_oparg_impl((PyCodeObject *)self, oparg); exit: return return_value; } -/*[clinic end generated code: output=e919ea67a1bcf524 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=73861c79e93aaee5 input=a9049054013a1b77]*/ diff --git a/Objects/clinic/complexobject.c.h b/Objects/clinic/complexobject.c.h index 3c3d1071b6eec3..e00da1d960c54d 100644 --- 
a/Objects/clinic/complexobject.c.h +++ b/Objects/clinic/complexobject.c.h @@ -21,9 +21,9 @@ static PyObject * complex_conjugate_impl(PyComplexObject *self); static PyObject * -complex_conjugate(PyComplexObject *self, PyObject *Py_UNUSED(ignored)) +complex_conjugate(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return complex_conjugate_impl(self); + return complex_conjugate_impl((PyComplexObject *)self); } PyDoc_STRVAR(complex___getnewargs____doc__, @@ -38,9 +38,9 @@ static PyObject * complex___getnewargs___impl(PyComplexObject *self); static PyObject * -complex___getnewargs__(PyComplexObject *self, PyObject *Py_UNUSED(ignored)) +complex___getnewargs__(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return complex___getnewargs___impl(self); + return complex___getnewargs___impl((PyComplexObject *)self); } PyDoc_STRVAR(complex___format____doc__, @@ -56,7 +56,7 @@ static PyObject * complex___format___impl(PyComplexObject *self, PyObject *format_spec); static PyObject * -complex___format__(PyComplexObject *self, PyObject *arg) +complex___format__(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; PyObject *format_spec; @@ -66,7 +66,7 @@ complex___format__(PyComplexObject *self, PyObject *arg) goto exit; } format_spec = arg; - return_value = complex___format___impl(self, format_spec); + return_value = complex___format___impl((PyComplexObject *)self, format_spec); exit: return return_value; @@ -85,9 +85,9 @@ static PyObject * complex___complex___impl(PyComplexObject *self); static PyObject * -complex___complex__(PyComplexObject *self, PyObject *Py_UNUSED(ignored)) +complex___complex__(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return complex___complex___impl(self); + return complex___complex___impl((PyComplexObject *)self); } PyDoc_STRVAR(complex_new__doc__, @@ -170,4 +170,4 @@ PyDoc_STRVAR(complex_from_number__doc__, #define COMPLEX_FROM_NUMBER_METHODDEF \ {"from_number", (PyCFunction)complex_from_number, METH_O|METH_CLASS, complex_from_number__doc__}, -/*[clinic end generated code: output=8c49a41c5a7f0aee input=a9049054013a1b77]*/ +/*[clinic end generated code: output=252cddef7f9169a0 input=a9049054013a1b77]*/ diff --git a/Objects/clinic/dictobject.c.h b/Objects/clinic/dictobject.c.h index fb46c4c64334f9..c66916bb33aa37 100644 --- a/Objects/clinic/dictobject.c.h +++ b/Objects/clinic/dictobject.c.h @@ -52,9 +52,9 @@ static PyObject * dict_copy_impl(PyDictObject *self); static PyObject * -dict_copy(PyDictObject *self, PyObject *Py_UNUSED(ignored)) +dict_copy(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return dict_copy_impl(self); + return dict_copy_impl((PyDictObject *)self); } PyDoc_STRVAR(dict___contains____doc__, @@ -79,7 +79,7 @@ static PyObject * dict_get_impl(PyDictObject *self, PyObject *key, PyObject *default_value); static PyObject * -dict_get(PyDictObject *self, PyObject *const *args, Py_ssize_t nargs) +dict_get(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *key; @@ -94,9 +94,7 @@ dict_get(PyDictObject *self, PyObject *const *args, Py_ssize_t nargs) } default_value = args[1]; skip_optional: - Py_BEGIN_CRITICAL_SECTION(self); - return_value = dict_get_impl(self, key, default_value); - Py_END_CRITICAL_SECTION(); + return_value = dict_get_impl((PyDictObject *)self, key, default_value); exit: return return_value; @@ -118,7 +116,7 @@ dict_setdefault_impl(PyDictObject *self, PyObject *key, PyObject *default_value); static PyObject * -dict_setdefault(PyDictObject *self, PyObject *const *args, Py_ssize_t nargs) 
+dict_setdefault(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *key; @@ -134,7 +132,7 @@ dict_setdefault(PyDictObject *self, PyObject *const *args, Py_ssize_t nargs) default_value = args[1]; skip_optional: Py_BEGIN_CRITICAL_SECTION(self); - return_value = dict_setdefault_impl(self, key, default_value); + return_value = dict_setdefault_impl((PyDictObject *)self, key, default_value); Py_END_CRITICAL_SECTION(); exit: @@ -154,9 +152,9 @@ static PyObject * dict_clear_impl(PyDictObject *self); static PyObject * -dict_clear(PyDictObject *self, PyObject *Py_UNUSED(ignored)) +dict_clear(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return dict_clear_impl(self); + return dict_clear_impl((PyDictObject *)self); } PyDoc_STRVAR(dict_pop__doc__, @@ -175,7 +173,7 @@ static PyObject * dict_pop_impl(PyDictObject *self, PyObject *key, PyObject *default_value); static PyObject * -dict_pop(PyDictObject *self, PyObject *const *args, Py_ssize_t nargs) +dict_pop(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *key; @@ -190,7 +188,7 @@ dict_pop(PyDictObject *self, PyObject *const *args, Py_ssize_t nargs) } default_value = args[1]; skip_optional: - return_value = dict_pop_impl(self, key, default_value); + return_value = dict_pop_impl((PyDictObject *)self, key, default_value); exit: return return_value; @@ -212,12 +210,12 @@ static PyObject * dict_popitem_impl(PyDictObject *self); static PyObject * -dict_popitem(PyDictObject *self, PyObject *Py_UNUSED(ignored)) +dict_popitem(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = dict_popitem_impl(self); + return_value = dict_popitem_impl((PyDictObject *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -236,9 +234,9 @@ static PyObject * dict___sizeof___impl(PyDictObject *self); static PyObject * -dict___sizeof__(PyDictObject *self, PyObject *Py_UNUSED(ignored)) +dict___sizeof__(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return dict___sizeof___impl(self); + return dict___sizeof___impl((PyDictObject *)self); } PyDoc_STRVAR(dict___reversed____doc__, @@ -254,9 +252,9 @@ static PyObject * dict___reversed___impl(PyDictObject *self); static PyObject * -dict___reversed__(PyDictObject *self, PyObject *Py_UNUSED(ignored)) +dict___reversed__(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return dict___reversed___impl(self); + return dict___reversed___impl((PyDictObject *)self); } PyDoc_STRVAR(dict_keys__doc__, @@ -272,9 +270,9 @@ static PyObject * dict_keys_impl(PyDictObject *self); static PyObject * -dict_keys(PyDictObject *self, PyObject *Py_UNUSED(ignored)) +dict_keys(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return dict_keys_impl(self); + return dict_keys_impl((PyDictObject *)self); } PyDoc_STRVAR(dict_items__doc__, @@ -290,9 +288,9 @@ static PyObject * dict_items_impl(PyDictObject *self); static PyObject * -dict_items(PyDictObject *self, PyObject *Py_UNUSED(ignored)) +dict_items(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return dict_items_impl(self); + return dict_items_impl((PyDictObject *)self); } PyDoc_STRVAR(dict_values__doc__, @@ -308,8 +306,8 @@ static PyObject * dict_values_impl(PyDictObject *self); static PyObject * -dict_values(PyDictObject *self, PyObject *Py_UNUSED(ignored)) +dict_values(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return dict_values_impl(self); + return dict_values_impl((PyDictObject *)self); } -/*[clinic end generated code: 
output=f3dd5f3fb8122aef input=a9049054013a1b77]*/ +/*[clinic end generated code: output=0f04bf0e7e6b130f input=a9049054013a1b77]*/ diff --git a/Objects/clinic/exceptions.c.h b/Objects/clinic/exceptions.c.h new file mode 100644 index 00000000000000..8699df07495ad8 --- /dev/null +++ b/Objects/clinic/exceptions.c.h @@ -0,0 +1,383 @@ +/*[clinic input] +preserve +[clinic start generated code]*/ + +#include "pycore_critical_section.h"// Py_BEGIN_CRITICAL_SECTION() +#include "pycore_modsupport.h" // _PyArg_BadArgument() + +PyDoc_STRVAR(BaseException___reduce____doc__, +"__reduce__($self, /)\n" +"--\n" +"\n"); + +#define BASEEXCEPTION___REDUCE___METHODDEF \ + {"__reduce__", (PyCFunction)BaseException___reduce__, METH_NOARGS, BaseException___reduce____doc__}, + +static PyObject * +BaseException___reduce___impl(PyBaseExceptionObject *self); + +static PyObject * +BaseException___reduce__(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + PyObject *return_value = NULL; + + Py_BEGIN_CRITICAL_SECTION(self); + return_value = BaseException___reduce___impl((PyBaseExceptionObject *)self); + Py_END_CRITICAL_SECTION(); + + return return_value; +} + +PyDoc_STRVAR(BaseException___setstate____doc__, +"__setstate__($self, state, /)\n" +"--\n" +"\n"); + +#define BASEEXCEPTION___SETSTATE___METHODDEF \ + {"__setstate__", (PyCFunction)BaseException___setstate__, METH_O, BaseException___setstate____doc__}, + +static PyObject * +BaseException___setstate___impl(PyBaseExceptionObject *self, PyObject *state); + +static PyObject * +BaseException___setstate__(PyBaseExceptionObject *self, PyObject *state) +{ + PyObject *return_value = NULL; + + Py_BEGIN_CRITICAL_SECTION(self); + return_value = BaseException___setstate___impl((PyBaseExceptionObject *)self, state); + Py_END_CRITICAL_SECTION(); + + return return_value; +} + +PyDoc_STRVAR(BaseException_with_traceback__doc__, +"with_traceback($self, tb, /)\n" +"--\n" +"\n" +"Set self.__traceback__ to tb and return self."); + +#define BASEEXCEPTION_WITH_TRACEBACK_METHODDEF \ + {"with_traceback", (PyCFunction)BaseException_with_traceback, METH_O, BaseException_with_traceback__doc__}, + +static PyObject * +BaseException_with_traceback_impl(PyBaseExceptionObject *self, PyObject *tb); + +static PyObject * +BaseException_with_traceback(PyBaseExceptionObject *self, PyObject *tb) +{ + PyObject *return_value = NULL; + + Py_BEGIN_CRITICAL_SECTION(self); + return_value = BaseException_with_traceback_impl((PyBaseExceptionObject *)self, tb); + Py_END_CRITICAL_SECTION(); + + return return_value; +} + +PyDoc_STRVAR(BaseException_add_note__doc__, +"add_note($self, note, /)\n" +"--\n" +"\n" +"Add a note to the exception"); + +#define BASEEXCEPTION_ADD_NOTE_METHODDEF \ + {"add_note", (PyCFunction)BaseException_add_note, METH_O, BaseException_add_note__doc__}, + +static PyObject * +BaseException_add_note_impl(PyBaseExceptionObject *self, PyObject *note); + +static PyObject * +BaseException_add_note(PyObject *self, PyObject *arg) +{ + PyObject *return_value = NULL; + PyObject *note; + + if (!PyUnicode_Check(arg)) { + _PyArg_BadArgument("add_note", "argument", "str", arg); + goto exit; + } + note = arg; + Py_BEGIN_CRITICAL_SECTION(self); + return_value = BaseException_add_note_impl((PyBaseExceptionObject *)self, note); + Py_END_CRITICAL_SECTION(); + +exit: + return return_value; +} + +#if !defined(BaseException_args_DOCSTR) +# define BaseException_args_DOCSTR NULL +#endif +#if defined(BASEEXCEPTION_ARGS_GETSETDEF) +# undef BASEEXCEPTION_ARGS_GETSETDEF +# define BASEEXCEPTION_ARGS_GETSETDEF {"args", 
(getter)BaseException_args_get, (setter)BaseException_args_set, BaseException_args_DOCSTR}, +#else +# define BASEEXCEPTION_ARGS_GETSETDEF {"args", (getter)BaseException_args_get, NULL, BaseException_args_DOCSTR}, +#endif + +static PyObject * +BaseException_args_get_impl(PyBaseExceptionObject *self); + +static PyObject * +BaseException_args_get(PyObject *self, void *Py_UNUSED(context)) +{ + PyObject *return_value = NULL; + + Py_BEGIN_CRITICAL_SECTION(self); + return_value = BaseException_args_get_impl((PyBaseExceptionObject *)self); + Py_END_CRITICAL_SECTION(); + + return return_value; +} + +#if !defined(BaseException_args_DOCSTR) +# define BaseException_args_DOCSTR NULL +#endif +#if defined(BASEEXCEPTION_ARGS_GETSETDEF) +# undef BASEEXCEPTION_ARGS_GETSETDEF +# define BASEEXCEPTION_ARGS_GETSETDEF {"args", (getter)BaseException_args_get, (setter)BaseException_args_set, BaseException_args_DOCSTR}, +#else +# define BASEEXCEPTION_ARGS_GETSETDEF {"args", NULL, (setter)BaseException_args_set, NULL}, +#endif + +static int +BaseException_args_set_impl(PyBaseExceptionObject *self, PyObject *value); + +static int +BaseException_args_set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) +{ + int return_value; + + Py_BEGIN_CRITICAL_SECTION(self); + return_value = BaseException_args_set_impl((PyBaseExceptionObject *)self, value); + Py_END_CRITICAL_SECTION(); + + return return_value; +} + +#if !defined(BaseException___traceback___DOCSTR) +# define BaseException___traceback___DOCSTR NULL +#endif +#if defined(BASEEXCEPTION___TRACEBACK___GETSETDEF) +# undef BASEEXCEPTION___TRACEBACK___GETSETDEF +# define BASEEXCEPTION___TRACEBACK___GETSETDEF {"__traceback__", (getter)BaseException___traceback___get, (setter)BaseException___traceback___set, BaseException___traceback___DOCSTR}, +#else +# define BASEEXCEPTION___TRACEBACK___GETSETDEF {"__traceback__", (getter)BaseException___traceback___get, NULL, BaseException___traceback___DOCSTR}, +#endif + +static PyObject * +BaseException___traceback___get_impl(PyBaseExceptionObject *self); + +static PyObject * +BaseException___traceback___get(PyObject *self, void *Py_UNUSED(context)) +{ + PyObject *return_value = NULL; + + Py_BEGIN_CRITICAL_SECTION(self); + return_value = BaseException___traceback___get_impl((PyBaseExceptionObject *)self); + Py_END_CRITICAL_SECTION(); + + return return_value; +} + +#if !defined(BaseException___traceback___DOCSTR) +# define BaseException___traceback___DOCSTR NULL +#endif +#if defined(BASEEXCEPTION___TRACEBACK___GETSETDEF) +# undef BASEEXCEPTION___TRACEBACK___GETSETDEF +# define BASEEXCEPTION___TRACEBACK___GETSETDEF {"__traceback__", (getter)BaseException___traceback___get, (setter)BaseException___traceback___set, BaseException___traceback___DOCSTR}, +#else +# define BASEEXCEPTION___TRACEBACK___GETSETDEF {"__traceback__", NULL, (setter)BaseException___traceback___set, NULL}, +#endif + +static int +BaseException___traceback___set_impl(PyBaseExceptionObject *self, + PyObject *value); + +static int +BaseException___traceback___set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) +{ + int return_value; + + Py_BEGIN_CRITICAL_SECTION(self); + return_value = BaseException___traceback___set_impl((PyBaseExceptionObject *)self, value); + Py_END_CRITICAL_SECTION(); + + return return_value; +} + +#if !defined(BaseException___context___DOCSTR) +# define BaseException___context___DOCSTR NULL +#endif +#if defined(BASEEXCEPTION___CONTEXT___GETSETDEF) +# undef BASEEXCEPTION___CONTEXT___GETSETDEF +# define 
BASEEXCEPTION___CONTEXT___GETSETDEF {"__context__", (getter)BaseException___context___get, (setter)BaseException___context___set, BaseException___context___DOCSTR}, +#else +# define BASEEXCEPTION___CONTEXT___GETSETDEF {"__context__", (getter)BaseException___context___get, NULL, BaseException___context___DOCSTR}, +#endif + +static PyObject * +BaseException___context___get_impl(PyBaseExceptionObject *self); + +static PyObject * +BaseException___context___get(PyObject *self, void *Py_UNUSED(context)) +{ + PyObject *return_value = NULL; + + Py_BEGIN_CRITICAL_SECTION(self); + return_value = BaseException___context___get_impl((PyBaseExceptionObject *)self); + Py_END_CRITICAL_SECTION(); + + return return_value; +} + +#if !defined(BaseException___context___DOCSTR) +# define BaseException___context___DOCSTR NULL +#endif +#if defined(BASEEXCEPTION___CONTEXT___GETSETDEF) +# undef BASEEXCEPTION___CONTEXT___GETSETDEF +# define BASEEXCEPTION___CONTEXT___GETSETDEF {"__context__", (getter)BaseException___context___get, (setter)BaseException___context___set, BaseException___context___DOCSTR}, +#else +# define BASEEXCEPTION___CONTEXT___GETSETDEF {"__context__", NULL, (setter)BaseException___context___set, NULL}, +#endif + +static int +BaseException___context___set_impl(PyBaseExceptionObject *self, + PyObject *value); + +static int +BaseException___context___set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) +{ + int return_value; + + Py_BEGIN_CRITICAL_SECTION(self); + return_value = BaseException___context___set_impl((PyBaseExceptionObject *)self, value); + Py_END_CRITICAL_SECTION(); + + return return_value; +} + +#if !defined(BaseException___cause___DOCSTR) +# define BaseException___cause___DOCSTR NULL +#endif +#if defined(BASEEXCEPTION___CAUSE___GETSETDEF) +# undef BASEEXCEPTION___CAUSE___GETSETDEF +# define BASEEXCEPTION___CAUSE___GETSETDEF {"__cause__", (getter)BaseException___cause___get, (setter)BaseException___cause___set, BaseException___cause___DOCSTR}, +#else +# define BASEEXCEPTION___CAUSE___GETSETDEF {"__cause__", (getter)BaseException___cause___get, NULL, BaseException___cause___DOCSTR}, +#endif + +static PyObject * +BaseException___cause___get_impl(PyBaseExceptionObject *self); + +static PyObject * +BaseException___cause___get(PyObject *self, void *Py_UNUSED(context)) +{ + PyObject *return_value = NULL; + + Py_BEGIN_CRITICAL_SECTION(self); + return_value = BaseException___cause___get_impl((PyBaseExceptionObject *)self); + Py_END_CRITICAL_SECTION(); + + return return_value; +} + +#if !defined(BaseException___cause___DOCSTR) +# define BaseException___cause___DOCSTR NULL +#endif +#if defined(BASEEXCEPTION___CAUSE___GETSETDEF) +# undef BASEEXCEPTION___CAUSE___GETSETDEF +# define BASEEXCEPTION___CAUSE___GETSETDEF {"__cause__", (getter)BaseException___cause___get, (setter)BaseException___cause___set, BaseException___cause___DOCSTR}, +#else +# define BASEEXCEPTION___CAUSE___GETSETDEF {"__cause__", NULL, (setter)BaseException___cause___set, NULL}, +#endif + +static int +BaseException___cause___set_impl(PyBaseExceptionObject *self, + PyObject *value); + +static int +BaseException___cause___set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) +{ + int return_value; + + Py_BEGIN_CRITICAL_SECTION(self); + return_value = BaseException___cause___set_impl((PyBaseExceptionObject *)self, value); + Py_END_CRITICAL_SECTION(); + + return return_value; +} + +PyDoc_STRVAR(BaseExceptionGroup_derive__doc__, +"derive($self, excs, /)\n" +"--\n" +"\n"); + +#define 
BASEEXCEPTIONGROUP_DERIVE_METHODDEF \ + {"derive", (PyCFunction)BaseExceptionGroup_derive, METH_O, BaseExceptionGroup_derive__doc__}, + +static PyObject * +BaseExceptionGroup_derive_impl(PyBaseExceptionGroupObject *self, + PyObject *excs); + +static PyObject * +BaseExceptionGroup_derive(PyBaseExceptionGroupObject *self, PyObject *excs) +{ + PyObject *return_value = NULL; + + Py_BEGIN_CRITICAL_SECTION(self); + return_value = BaseExceptionGroup_derive_impl((PyBaseExceptionGroupObject *)self, excs); + Py_END_CRITICAL_SECTION(); + + return return_value; +} + +PyDoc_STRVAR(BaseExceptionGroup_split__doc__, +"split($self, matcher_value, /)\n" +"--\n" +"\n"); + +#define BASEEXCEPTIONGROUP_SPLIT_METHODDEF \ + {"split", (PyCFunction)BaseExceptionGroup_split, METH_O, BaseExceptionGroup_split__doc__}, + +static PyObject * +BaseExceptionGroup_split_impl(PyBaseExceptionGroupObject *self, + PyObject *matcher_value); + +static PyObject * +BaseExceptionGroup_split(PyBaseExceptionGroupObject *self, PyObject *matcher_value) +{ + PyObject *return_value = NULL; + + Py_BEGIN_CRITICAL_SECTION(self); + return_value = BaseExceptionGroup_split_impl((PyBaseExceptionGroupObject *)self, matcher_value); + Py_END_CRITICAL_SECTION(); + + return return_value; +} + +PyDoc_STRVAR(BaseExceptionGroup_subgroup__doc__, +"subgroup($self, matcher_value, /)\n" +"--\n" +"\n"); + +#define BASEEXCEPTIONGROUP_SUBGROUP_METHODDEF \ + {"subgroup", (PyCFunction)BaseExceptionGroup_subgroup, METH_O, BaseExceptionGroup_subgroup__doc__}, + +static PyObject * +BaseExceptionGroup_subgroup_impl(PyBaseExceptionGroupObject *self, + PyObject *matcher_value); + +static PyObject * +BaseExceptionGroup_subgroup(PyBaseExceptionGroupObject *self, PyObject *matcher_value) +{ + PyObject *return_value = NULL; + + Py_BEGIN_CRITICAL_SECTION(self); + return_value = BaseExceptionGroup_subgroup_impl((PyBaseExceptionGroupObject *)self, matcher_value); + Py_END_CRITICAL_SECTION(); + + return return_value; +} +/*[clinic end generated code: output=19aed708dcaf7184 input=a9049054013a1b77]*/ diff --git a/Objects/clinic/listobject.c.h b/Objects/clinic/listobject.c.h index 975f253c096275..a29ed9f7088700 100644 --- a/Objects/clinic/listobject.c.h +++ b/Objects/clinic/listobject.c.h @@ -23,7 +23,7 @@ static PyObject * list_insert_impl(PyListObject *self, Py_ssize_t index, PyObject *object); static PyObject * -list_insert(PyListObject *self, PyObject *const *args, Py_ssize_t nargs) +list_insert(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; Py_ssize_t index; @@ -46,7 +46,7 @@ list_insert(PyListObject *self, PyObject *const *args, Py_ssize_t nargs) } object = args[1]; Py_BEGIN_CRITICAL_SECTION(self); - return_value = list_insert_impl(self, index, object); + return_value = list_insert_impl((PyListObject *)self, index, object); Py_END_CRITICAL_SECTION(); exit: @@ -66,12 +66,12 @@ static PyObject * py_list_clear_impl(PyListObject *self); static PyObject * -py_list_clear(PyListObject *self, PyObject *Py_UNUSED(ignored)) +py_list_clear(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = py_list_clear_impl(self); + return_value = py_list_clear_impl((PyListObject *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -90,12 +90,12 @@ static PyObject * list_copy_impl(PyListObject *self); static PyObject * -list_copy(PyListObject *self, PyObject *Py_UNUSED(ignored)) +list_copy(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; 
Py_BEGIN_CRITICAL_SECTION(self); - return_value = list_copy_impl(self); + return_value = list_copy_impl((PyListObject *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -119,7 +119,7 @@ list_append(PyListObject *self, PyObject *object) PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = list_append_impl(self, object); + return_value = list_append_impl((PyListObject *)self, object); Py_END_CRITICAL_SECTION(); return return_value; @@ -149,7 +149,7 @@ static PyObject * list_pop_impl(PyListObject *self, Py_ssize_t index); static PyObject * -list_pop(PyListObject *self, PyObject *const *args, Py_ssize_t nargs) +list_pop(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; Py_ssize_t index = -1; @@ -174,7 +174,7 @@ list_pop(PyListObject *self, PyObject *const *args, Py_ssize_t nargs) } skip_optional: Py_BEGIN_CRITICAL_SECTION(self); - return_value = list_pop_impl(self, index); + return_value = list_pop_impl((PyListObject *)self, index); Py_END_CRITICAL_SECTION(); exit: @@ -202,7 +202,7 @@ static PyObject * list_sort_impl(PyListObject *self, PyObject *keyfunc, int reverse); static PyObject * -list_sort(PyListObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +list_sort(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -255,7 +255,7 @@ list_sort(PyListObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject } skip_optional_kwonly: Py_BEGIN_CRITICAL_SECTION(self); - return_value = list_sort_impl(self, keyfunc, reverse); + return_value = list_sort_impl((PyListObject *)self, keyfunc, reverse); Py_END_CRITICAL_SECTION(); exit: @@ -275,12 +275,12 @@ static PyObject * list_reverse_impl(PyListObject *self); static PyObject * -list_reverse(PyListObject *self, PyObject *Py_UNUSED(ignored)) +list_reverse(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = list_reverse_impl(self); + return_value = list_reverse_impl((PyListObject *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -302,7 +302,7 @@ list_index_impl(PyListObject *self, PyObject *value, Py_ssize_t start, Py_ssize_t stop); static PyObject * -list_index(PyListObject *self, PyObject *const *args, Py_ssize_t nargs) +list_index(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *value; @@ -326,7 +326,7 @@ list_index(PyListObject *self, PyObject *const *args, Py_ssize_t nargs) goto exit; } skip_optional: - return_value = list_index_impl(self, value, start, stop); + return_value = list_index_impl((PyListObject *)self, value, start, stop); exit: return return_value; @@ -361,7 +361,7 @@ list_remove(PyListObject *self, PyObject *value) PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = list_remove_impl(self, value); + return_value = list_remove_impl((PyListObject *)self, value); Py_END_CRITICAL_SECTION(); return return_value; @@ -418,9 +418,9 @@ static PyObject * list___sizeof___impl(PyListObject *self); static PyObject * -list___sizeof__(PyListObject *self, PyObject *Py_UNUSED(ignored)) +list___sizeof__(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return list___sizeof___impl(self); + return list___sizeof___impl((PyListObject *)self); } PyDoc_STRVAR(list___reversed____doc__, @@ -436,8 +436,8 @@ static PyObject * list___reversed___impl(PyListObject *self); static 
PyObject * -list___reversed__(PyListObject *self, PyObject *Py_UNUSED(ignored)) +list___reversed__(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return list___reversed___impl(self); + return list___reversed___impl((PyListObject *)self); } -/*[clinic end generated code: output=9357151278d77ea1 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=35c43dc33f9ba521 input=a9049054013a1b77]*/ diff --git a/Objects/clinic/memoryobject.c.h b/Objects/clinic/memoryobject.c.h index a6cf1f431a15b0..4706c92051926c 100644 --- a/Objects/clinic/memoryobject.c.h +++ b/Objects/clinic/memoryobject.c.h @@ -137,9 +137,9 @@ static PyObject * memoryview_release_impl(PyMemoryViewObject *self); static PyObject * -memoryview_release(PyMemoryViewObject *self, PyObject *Py_UNUSED(ignored)) +memoryview_release(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return memoryview_release_impl(self); + return memoryview_release_impl((PyMemoryViewObject *)self); } PyDoc_STRVAR(memoryview_cast__doc__, @@ -156,7 +156,7 @@ memoryview_cast_impl(PyMemoryViewObject *self, PyObject *format, PyObject *shape); static PyObject * -memoryview_cast(PyMemoryViewObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +memoryview_cast(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -204,7 +204,7 @@ memoryview_cast(PyMemoryViewObject *self, PyObject *const *args, Py_ssize_t narg } shape = args[1]; skip_optional_pos: - return_value = memoryview_cast_impl(self, format, shape); + return_value = memoryview_cast_impl((PyMemoryViewObject *)self, format, shape); exit: return return_value; @@ -223,9 +223,9 @@ static PyObject * memoryview_toreadonly_impl(PyMemoryViewObject *self); static PyObject * -memoryview_toreadonly(PyMemoryViewObject *self, PyObject *Py_UNUSED(ignored)) +memoryview_toreadonly(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return memoryview_toreadonly_impl(self); + return memoryview_toreadonly_impl((PyMemoryViewObject *)self); } PyDoc_STRVAR(memoryview_tolist__doc__, @@ -241,9 +241,9 @@ static PyObject * memoryview_tolist_impl(PyMemoryViewObject *self); static PyObject * -memoryview_tolist(PyMemoryViewObject *self, PyObject *Py_UNUSED(ignored)) +memoryview_tolist(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return memoryview_tolist_impl(self); + return memoryview_tolist_impl((PyMemoryViewObject *)self); } PyDoc_STRVAR(memoryview_tobytes__doc__, @@ -265,7 +265,7 @@ static PyObject * memoryview_tobytes_impl(PyMemoryViewObject *self, const char *order); static PyObject * -memoryview_tobytes(PyMemoryViewObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +memoryview_tobytes(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -324,7 +324,7 @@ memoryview_tobytes(PyMemoryViewObject *self, PyObject *const *args, Py_ssize_t n goto exit; } skip_optional_pos: - return_value = memoryview_tobytes_impl(self, order); + return_value = memoryview_tobytes_impl((PyMemoryViewObject *)self, order); exit: return return_value; @@ -361,7 +361,7 @@ memoryview_hex_impl(PyMemoryViewObject *self, PyObject *sep, int bytes_per_sep); static PyObject * -memoryview_hex(PyMemoryViewObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +memoryview_hex(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject 
*return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -413,7 +413,7 @@ memoryview_hex(PyMemoryViewObject *self, PyObject *const *args, Py_ssize_t nargs goto exit; } skip_optional_pos: - return_value = memoryview_hex_impl(self, sep, bytes_per_sep); + return_value = memoryview_hex_impl((PyMemoryViewObject *)self, sep, bytes_per_sep); exit: return return_value; @@ -444,7 +444,7 @@ memoryview_index_impl(PyMemoryViewObject *self, PyObject *value, Py_ssize_t start, Py_ssize_t stop); static PyObject * -memoryview_index(PyMemoryViewObject *self, PyObject *const *args, Py_ssize_t nargs) +memoryview_index(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *value; @@ -468,9 +468,9 @@ memoryview_index(PyMemoryViewObject *self, PyObject *const *args, Py_ssize_t nar goto exit; } skip_optional: - return_value = memoryview_index_impl(self, value, start, stop); + return_value = memoryview_index_impl((PyMemoryViewObject *)self, value, start, stop); exit: return return_value; } -/*[clinic end generated code: output=132893ef5f67ad73 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=2ef6c061d9c4e3dc input=a9049054013a1b77]*/ diff --git a/Objects/clinic/odictobject.c.h b/Objects/clinic/odictobject.c.h index 4b97596e5dbd2f..44d89c4e0ef2f7 100644 --- a/Objects/clinic/odictobject.c.h +++ b/Objects/clinic/odictobject.c.h @@ -87,7 +87,7 @@ OrderedDict_setdefault_impl(PyODictObject *self, PyObject *key, PyObject *default_value); static PyObject * -OrderedDict_setdefault(PyODictObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +OrderedDict_setdefault(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -131,7 +131,7 @@ OrderedDict_setdefault(PyODictObject *self, PyObject *const *args, Py_ssize_t na } default_value = args[1]; skip_optional_pos: - return_value = OrderedDict_setdefault_impl(self, key, default_value); + return_value = OrderedDict_setdefault_impl((PyODictObject *)self, key, default_value); exit: return return_value; @@ -154,7 +154,7 @@ OrderedDict_pop_impl(PyODictObject *self, PyObject *key, PyObject *default_value); static PyObject * -OrderedDict_pop(PyODictObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +OrderedDict_pop(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -198,7 +198,7 @@ OrderedDict_pop(PyODictObject *self, PyObject *const *args, Py_ssize_t nargs, Py } default_value = args[1]; skip_optional_pos: - return_value = OrderedDict_pop_impl(self, key, default_value); + return_value = OrderedDict_pop_impl((PyODictObject *)self, key, default_value); exit: return return_value; @@ -219,7 +219,7 @@ static PyObject * OrderedDict_popitem_impl(PyODictObject *self, int last); static PyObject * -OrderedDict_popitem(PyODictObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +OrderedDict_popitem(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -264,7 +264,7 @@ OrderedDict_popitem(PyODictObject *self, PyObject *const *args, Py_ssize_t nargs goto exit; } skip_optional_pos: - return_value = OrderedDict_popitem_impl(self, last); + return_value = OrderedDict_popitem_impl((PyODictObject 
*)self, last); exit: return return_value; @@ -285,7 +285,7 @@ static PyObject * OrderedDict_move_to_end_impl(PyODictObject *self, PyObject *key, int last); static PyObject * -OrderedDict_move_to_end(PyODictObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +OrderedDict_move_to_end(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -332,9 +332,9 @@ OrderedDict_move_to_end(PyODictObject *self, PyObject *const *args, Py_ssize_t n goto exit; } skip_optional_pos: - return_value = OrderedDict_move_to_end_impl(self, key, last); + return_value = OrderedDict_move_to_end_impl((PyODictObject *)self, key, last); exit: return return_value; } -/*[clinic end generated code: output=2aa6fc0567c9252c input=a9049054013a1b77]*/ +/*[clinic end generated code: output=55bd390bb516e997 input=a9049054013a1b77]*/ diff --git a/Objects/clinic/setobject.c.h b/Objects/clinic/setobject.c.h index 986993b4aa9bda..bf7e604e4b0a46 100644 --- a/Objects/clinic/setobject.c.h +++ b/Objects/clinic/setobject.c.h @@ -19,12 +19,12 @@ static PyObject * set_pop_impl(PySetObject *so); static PyObject * -set_pop(PySetObject *so, PyObject *Py_UNUSED(ignored)) +set_pop(PyObject *so, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(so); - return_value = set_pop_impl(so); + return_value = set_pop_impl((PySetObject *)so); Py_END_CRITICAL_SECTION(); return return_value; @@ -44,7 +44,7 @@ set_update_impl(PySetObject *so, PyObject * const *others, Py_ssize_t others_length); static PyObject * -set_update(PySetObject *so, PyObject *const *args, Py_ssize_t nargs) +set_update(PyObject *so, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject * const *others; @@ -52,7 +52,7 @@ set_update(PySetObject *so, PyObject *const *args, Py_ssize_t nargs) others = args; others_length = nargs; - return_value = set_update_impl(so, others, others_length); + return_value = set_update_impl((PySetObject *)so, others, others_length); return return_value; } @@ -70,12 +70,12 @@ static PyObject * set_copy_impl(PySetObject *so); static PyObject * -set_copy(PySetObject *so, PyObject *Py_UNUSED(ignored)) +set_copy(PyObject *so, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(so); - return_value = set_copy_impl(so); + return_value = set_copy_impl((PySetObject *)so); Py_END_CRITICAL_SECTION(); return return_value; @@ -94,12 +94,12 @@ static PyObject * frozenset_copy_impl(PySetObject *so); static PyObject * -frozenset_copy(PySetObject *so, PyObject *Py_UNUSED(ignored)) +frozenset_copy(PyObject *so, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(so); - return_value = frozenset_copy_impl(so); + return_value = frozenset_copy_impl((PySetObject *)so); Py_END_CRITICAL_SECTION(); return return_value; @@ -118,12 +118,12 @@ static PyObject * set_clear_impl(PySetObject *so); static PyObject * -set_clear(PySetObject *so, PyObject *Py_UNUSED(ignored)) +set_clear(PyObject *so, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(so); - return_value = set_clear_impl(so); + return_value = set_clear_impl((PySetObject *)so); Py_END_CRITICAL_SECTION(); return return_value; @@ -143,7 +143,7 @@ set_union_impl(PySetObject *so, PyObject * const *others, Py_ssize_t others_length); static PyObject * -set_union(PySetObject *so, PyObject *const *args, Py_ssize_t nargs) 
+set_union(PyObject *so, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject * const *others; @@ -151,7 +151,7 @@ set_union(PySetObject *so, PyObject *const *args, Py_ssize_t nargs) others = args; others_length = nargs; - return_value = set_union_impl(so, others, others_length); + return_value = set_union_impl((PySetObject *)so, others, others_length); return return_value; } @@ -170,7 +170,7 @@ set_intersection_multi_impl(PySetObject *so, PyObject * const *others, Py_ssize_t others_length); static PyObject * -set_intersection_multi(PySetObject *so, PyObject *const *args, Py_ssize_t nargs) +set_intersection_multi(PyObject *so, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject * const *others; @@ -178,7 +178,7 @@ set_intersection_multi(PySetObject *so, PyObject *const *args, Py_ssize_t nargs) others = args; others_length = nargs; - return_value = set_intersection_multi_impl(so, others, others_length); + return_value = set_intersection_multi_impl((PySetObject *)so, others, others_length); return return_value; } @@ -197,7 +197,7 @@ set_intersection_update_multi_impl(PySetObject *so, PyObject * const *others, Py_ssize_t others_length); static PyObject * -set_intersection_update_multi(PySetObject *so, PyObject *const *args, Py_ssize_t nargs) +set_intersection_update_multi(PyObject *so, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject * const *others; @@ -205,7 +205,7 @@ set_intersection_update_multi(PySetObject *so, PyObject *const *args, Py_ssize_t others = args; others_length = nargs; - return_value = set_intersection_update_multi_impl(so, others, others_length); + return_value = set_intersection_update_multi_impl((PySetObject *)so, others, others_length); return return_value; } @@ -228,7 +228,7 @@ set_isdisjoint(PySetObject *so, PyObject *other) PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION2(so, other); - return_value = set_isdisjoint_impl(so, other); + return_value = set_isdisjoint_impl((PySetObject *)so, other); Py_END_CRITICAL_SECTION2(); return return_value; @@ -248,7 +248,7 @@ set_difference_update_impl(PySetObject *so, PyObject * const *others, Py_ssize_t others_length); static PyObject * -set_difference_update(PySetObject *so, PyObject *const *args, Py_ssize_t nargs) +set_difference_update(PyObject *so, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject * const *others; @@ -256,7 +256,7 @@ set_difference_update(PySetObject *so, PyObject *const *args, Py_ssize_t nargs) others = args; others_length = nargs; - return_value = set_difference_update_impl(so, others, others_length); + return_value = set_difference_update_impl((PySetObject *)so, others, others_length); return return_value; } @@ -275,7 +275,7 @@ set_difference_multi_impl(PySetObject *so, PyObject * const *others, Py_ssize_t others_length); static PyObject * -set_difference_multi(PySetObject *so, PyObject *const *args, Py_ssize_t nargs) +set_difference_multi(PyObject *so, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject * const *others; @@ -283,7 +283,7 @@ set_difference_multi(PySetObject *so, PyObject *const *args, Py_ssize_t nargs) others = args; others_length = nargs; - return_value = set_difference_multi_impl(so, others, others_length); + return_value = set_difference_multi_impl((PySetObject *)so, others, others_length); return return_value; } @@ -315,7 +315,7 @@ set_symmetric_difference(PySetObject *so, PyObject *other) PyObject *return_value = NULL; 
Py_BEGIN_CRITICAL_SECTION2(so, other); - return_value = set_symmetric_difference_impl(so, other); + return_value = set_symmetric_difference_impl((PySetObject *)so, other); Py_END_CRITICAL_SECTION2(); return return_value; @@ -339,7 +339,7 @@ set_issubset(PySetObject *so, PyObject *other) PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION2(so, other); - return_value = set_issubset_impl(so, other); + return_value = set_issubset_impl((PySetObject *)so, other); Py_END_CRITICAL_SECTION2(); return return_value; @@ -363,7 +363,7 @@ set_issuperset(PySetObject *so, PyObject *other) PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION2(so, other); - return_value = set_issuperset_impl(so, other); + return_value = set_issuperset_impl((PySetObject *)so, other); Py_END_CRITICAL_SECTION2(); return return_value; @@ -389,7 +389,7 @@ set_add(PySetObject *so, PyObject *key) PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(so); - return_value = set_add_impl(so, key); + return_value = set_add_impl((PySetObject *)so, key); Py_END_CRITICAL_SECTION(); return return_value; @@ -413,7 +413,7 @@ set___contains__(PySetObject *so, PyObject *key) PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(so); - return_value = set___contains___impl(so, key); + return_value = set___contains___impl((PySetObject *)so, key); Py_END_CRITICAL_SECTION(); return return_value; @@ -439,7 +439,7 @@ set_remove(PySetObject *so, PyObject *key) PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(so); - return_value = set_remove_impl(so, key); + return_value = set_remove_impl((PySetObject *)so, key); Py_END_CRITICAL_SECTION(); return return_value; @@ -466,7 +466,7 @@ set_discard(PySetObject *so, PyObject *key) PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(so); - return_value = set_discard_impl(so, key); + return_value = set_discard_impl((PySetObject *)so, key); Py_END_CRITICAL_SECTION(); return return_value; @@ -485,12 +485,12 @@ static PyObject * set___reduce___impl(PySetObject *so); static PyObject * -set___reduce__(PySetObject *so, PyObject *Py_UNUSED(ignored)) +set___reduce__(PyObject *so, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(so); - return_value = set___reduce___impl(so); + return_value = set___reduce___impl((PySetObject *)so); Py_END_CRITICAL_SECTION(); return return_value; @@ -509,14 +509,14 @@ static PyObject * set___sizeof___impl(PySetObject *so); static PyObject * -set___sizeof__(PySetObject *so, PyObject *Py_UNUSED(ignored)) +set___sizeof__(PyObject *so, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(so); - return_value = set___sizeof___impl(so); + return_value = set___sizeof___impl((PySetObject *)so); Py_END_CRITICAL_SECTION(); return return_value; } -/*[clinic end generated code: output=4b65e7709927f31f input=a9049054013a1b77]*/ +/*[clinic end generated code: output=83b7742a762ce465 input=a9049054013a1b77]*/ diff --git a/Objects/clinic/tupleobject.c.h b/Objects/clinic/tupleobject.c.h index 5d6a2c481a5f2a..40ffd4c1755769 100644 --- a/Objects/clinic/tupleobject.c.h +++ b/Objects/clinic/tupleobject.c.h @@ -20,7 +20,7 @@ tuple_index_impl(PyTupleObject *self, PyObject *value, Py_ssize_t start, Py_ssize_t stop); static PyObject * -tuple_index(PyTupleObject *self, PyObject *const *args, Py_ssize_t nargs) +tuple_index(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *value; @@ -44,7 +44,7 @@ tuple_index(PyTupleObject *self, PyObject *const *args, Py_ssize_t 
nargs) goto exit; } skip_optional: - return_value = tuple_index_impl(self, value, start, stop); + return_value = tuple_index_impl((PyTupleObject *)self, value, start, stop); exit: return return_value; @@ -110,8 +110,8 @@ static PyObject * tuple___getnewargs___impl(PyTupleObject *self); static PyObject * -tuple___getnewargs__(PyTupleObject *self, PyObject *Py_UNUSED(ignored)) +tuple___getnewargs__(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return tuple___getnewargs___impl(self); + return tuple___getnewargs___impl((PyTupleObject *)self); } -/*[clinic end generated code: output=a6a9abba5d121f4c input=a9049054013a1b77]*/ +/*[clinic end generated code: output=779cb4a13db67397 input=a9049054013a1b77]*/ diff --git a/Objects/clinic/typeobject.c.h b/Objects/clinic/typeobject.c.h index 1fa153598db213..5e8187b3f5b748 100644 --- a/Objects/clinic/typeobject.c.h +++ b/Objects/clinic/typeobject.c.h @@ -22,7 +22,7 @@ type___instancecheck__(PyTypeObject *self, PyObject *instance) PyObject *return_value = NULL; int _return_value; - _return_value = type___instancecheck___impl(self, instance); + _return_value = type___instancecheck___impl((PyTypeObject *)self, instance); if ((_return_value == -1) && PyErr_Occurred()) { goto exit; } @@ -50,7 +50,7 @@ type___subclasscheck__(PyTypeObject *self, PyObject *subclass) PyObject *return_value = NULL; int _return_value; - _return_value = type___subclasscheck___impl(self, subclass); + _return_value = type___subclasscheck___impl((PyTypeObject *)self, subclass); if ((_return_value == -1) && PyErr_Occurred()) { goto exit; } @@ -73,9 +73,9 @@ static PyObject * type_mro_impl(PyTypeObject *self); static PyObject * -type_mro(PyTypeObject *self, PyObject *Py_UNUSED(ignored)) +type_mro(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return type_mro_impl(self); + return type_mro_impl((PyTypeObject *)self); } PyDoc_STRVAR(type___subclasses____doc__, @@ -91,9 +91,9 @@ static PyObject * type___subclasses___impl(PyTypeObject *self); static PyObject * -type___subclasses__(PyTypeObject *self, PyObject *Py_UNUSED(ignored)) +type___subclasses__(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return type___subclasses___impl(self); + return type___subclasses___impl((PyTypeObject *)self); } PyDoc_STRVAR(type___dir____doc__, @@ -109,9 +109,9 @@ static PyObject * type___dir___impl(PyTypeObject *self); static PyObject * -type___dir__(PyTypeObject *self, PyObject *Py_UNUSED(ignored)) +type___dir__(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return type___dir___impl(self); + return type___dir___impl((PyTypeObject *)self); } PyDoc_STRVAR(type___sizeof____doc__, @@ -127,9 +127,9 @@ static PyObject * type___sizeof___impl(PyTypeObject *self); static PyObject * -type___sizeof__(PyTypeObject *self, PyObject *Py_UNUSED(ignored)) +type___sizeof__(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return type___sizeof___impl(self); + return type___sizeof___impl((PyTypeObject *)self); } PyDoc_STRVAR(object___getstate____doc__, @@ -262,4 +262,4 @@ object___dir__(PyObject *self, PyObject *Py_UNUSED(ignored)) { return object___dir___impl(self); } -/*[clinic end generated code: output=b56c87f9cace1921 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=f7db85fd11818c63 input=a9049054013a1b77]*/ diff --git a/Objects/clinic/typevarobject.c.h b/Objects/clinic/typevarobject.c.h index c17998830b02eb..e50ed7d95b9b2a 100644 --- a/Objects/clinic/typevarobject.c.h +++ b/Objects/clinic/typevarobject.c.h @@ -143,7 +143,7 @@ typevar_typing_prepare_subst_impl(typevarobject *self, PyObject 
*alias, PyObject *args); static PyObject * -typevar_typing_prepare_subst(typevarobject *self, PyObject *const *args, Py_ssize_t nargs) +typevar_typing_prepare_subst(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *alias; @@ -154,7 +154,7 @@ typevar_typing_prepare_subst(typevarobject *self, PyObject *const *args, Py_ssiz } alias = args[0]; __clinic_args = args[1]; - return_value = typevar_typing_prepare_subst_impl(self, alias, __clinic_args); + return_value = typevar_typing_prepare_subst_impl((typevarobject *)self, alias, __clinic_args); exit: return return_value; @@ -172,9 +172,9 @@ static PyObject * typevar_reduce_impl(typevarobject *self); static PyObject * -typevar_reduce(typevarobject *self, PyObject *Py_UNUSED(ignored)) +typevar_reduce(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return typevar_reduce_impl(self); + return typevar_reduce_impl((typevarobject *)self); } PyDoc_STRVAR(typevar_has_default__doc__, @@ -189,9 +189,9 @@ static PyObject * typevar_has_default_impl(typevarobject *self); static PyObject * -typevar_has_default(typevarobject *self, PyObject *Py_UNUSED(ignored)) +typevar_has_default(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return typevar_has_default_impl(self); + return typevar_has_default_impl((typevarobject *)self); } PyDoc_STRVAR(paramspecargs_new__doc__, @@ -431,7 +431,7 @@ paramspec_typing_prepare_subst_impl(paramspecobject *self, PyObject *alias, PyObject *args); static PyObject * -paramspec_typing_prepare_subst(paramspecobject *self, PyObject *const *args, Py_ssize_t nargs) +paramspec_typing_prepare_subst(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *alias; @@ -442,7 +442,7 @@ paramspec_typing_prepare_subst(paramspecobject *self, PyObject *const *args, Py_ } alias = args[0]; __clinic_args = args[1]; - return_value = paramspec_typing_prepare_subst_impl(self, alias, __clinic_args); + return_value = paramspec_typing_prepare_subst_impl((paramspecobject *)self, alias, __clinic_args); exit: return return_value; @@ -460,9 +460,9 @@ static PyObject * paramspec_reduce_impl(paramspecobject *self); static PyObject * -paramspec_reduce(paramspecobject *self, PyObject *Py_UNUSED(ignored)) +paramspec_reduce(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return paramspec_reduce_impl(self); + return paramspec_reduce_impl((paramspecobject *)self); } PyDoc_STRVAR(paramspec_has_default__doc__, @@ -477,9 +477,9 @@ static PyObject * paramspec_has_default_impl(paramspecobject *self); static PyObject * -paramspec_has_default(paramspecobject *self, PyObject *Py_UNUSED(ignored)) +paramspec_has_default(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return paramspec_has_default_impl(self); + return paramspec_has_default_impl((paramspecobject *)self); } PyDoc_STRVAR(typevartuple__doc__, @@ -570,7 +570,7 @@ typevartuple_typing_prepare_subst_impl(typevartupleobject *self, PyObject *alias, PyObject *args); static PyObject * -typevartuple_typing_prepare_subst(typevartupleobject *self, PyObject *const *args, Py_ssize_t nargs) +typevartuple_typing_prepare_subst(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *alias; @@ -581,7 +581,7 @@ typevartuple_typing_prepare_subst(typevartupleobject *self, PyObject *const *arg } alias = args[0]; __clinic_args = args[1]; - return_value = typevartuple_typing_prepare_subst_impl(self, alias, __clinic_args); + return_value = typevartuple_typing_prepare_subst_impl((typevartupleobject 
*)self, alias, __clinic_args); exit: return return_value; @@ -599,9 +599,9 @@ static PyObject * typevartuple_reduce_impl(typevartupleobject *self); static PyObject * -typevartuple_reduce(typevartupleobject *self, PyObject *Py_UNUSED(ignored)) +typevartuple_reduce(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return typevartuple_reduce_impl(self); + return typevartuple_reduce_impl((typevartupleobject *)self); } PyDoc_STRVAR(typevartuple_has_default__doc__, @@ -616,9 +616,9 @@ static PyObject * typevartuple_has_default_impl(typevartupleobject *self); static PyObject * -typevartuple_has_default(typevartupleobject *self, PyObject *Py_UNUSED(ignored)) +typevartuple_has_default(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return typevartuple_has_default_impl(self); + return typevartuple_has_default_impl((typevartupleobject *)self); } PyDoc_STRVAR(typealias_reduce__doc__, @@ -633,9 +633,9 @@ static PyObject * typealias_reduce_impl(typealiasobject *self); static PyObject * -typealias_reduce(typealiasobject *self, PyObject *Py_UNUSED(ignored)) +typealias_reduce(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return typealias_reduce_impl(self); + return typealias_reduce_impl((typealiasobject *)self); } PyDoc_STRVAR(typealias_new__doc__, @@ -706,4 +706,4 @@ typealias_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) exit: return return_value; } -/*[clinic end generated code: output=26351c3549f5ad83 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=f499d959a942c599 input=a9049054013a1b77]*/ diff --git a/Objects/clinic/unicodeobject.c.h b/Objects/clinic/unicodeobject.c.h index 361f20c0fa4c1d..5c6a425b0f803a 100644 --- a/Objects/clinic/unicodeobject.c.h +++ b/Objects/clinic/unicodeobject.c.h @@ -22,9 +22,9 @@ static PyObject * EncodingMap_size_impl(struct encoding_map *self); static PyObject * -EncodingMap_size(struct encoding_map *self, PyObject *Py_UNUSED(ignored)) +EncodingMap_size(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return EncodingMap_size_impl(self); + return EncodingMap_size_impl((struct encoding_map *)self); } PyDoc_STRVAR(unicode_title__doc__, @@ -1895,4 +1895,4 @@ unicode_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) exit: return return_value; } -/*[clinic end generated code: output=30efbf79c5a07dd2 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=4d1cecd6d08498a4 input=a9049054013a1b77]*/ diff --git a/Objects/codeobject.c b/Objects/codeobject.c index ae232cae86799b..a7b46aa2dfbbc0 100644 --- a/Objects/codeobject.c +++ b/Objects/codeobject.c @@ -108,6 +108,8 @@ PyCode_ClearWatcher(int watcher_id) * generic helpers ******************/ +#define _PyCodeObject_CAST(op) (assert(PyCode_Check(op)), (PyCodeObject *)(op)) + static int should_intern_string(PyObject *o) { @@ -457,8 +459,7 @@ _PyCode_Validate(struct _PyCodeConstructor *con) } extern void -_PyCode_Quicken(_Py_CODEUNIT *instructions, Py_ssize_t size, PyObject *consts, - int enable_counters); +_PyCode_Quicken(_Py_CODEUNIT *instructions, Py_ssize_t size, int enable_counters); #ifdef Py_GIL_DISABLED static _PyCodeArray * _PyCodeArray_New(Py_ssize_t size); @@ -541,10 +542,9 @@ init_code(PyCodeObject *co, struct _PyCodeConstructor *con) } co->_co_firsttraceable = entry_point; #ifdef Py_GIL_DISABLED - _PyCode_Quicken(_PyCode_CODE(co), Py_SIZE(co), co->co_consts, - interp->config.tlbc_enabled); + _PyCode_Quicken(_PyCode_CODE(co), Py_SIZE(co), interp->config.tlbc_enabled); #else - _PyCode_Quicken(_PyCode_CODE(co), Py_SIZE(co), co->co_consts, 1); + 
_PyCode_Quicken(_PyCode_CODE(co), Py_SIZE(co), 1); #endif notify_code_watchers(PY_CODE_EVENT_CREATE, co); return 0; @@ -977,6 +977,9 @@ PyCode_Addr2Line(PyCodeObject *co, int addrq) if (addrq < 0) { return co->co_firstlineno; } + if (co->_co_monitoring && co->_co_monitoring->lines) { + return _Py_Instrumentation_GetLine(co, addrq/sizeof(_Py_CODEUNIT)); + } assert(addrq >= 0 && addrq < _PyCode_NBYTES(co)); PyCodeAddressRange bounds; _PyCode_InitAddressRange(co, &bounds); @@ -1865,11 +1868,12 @@ free_monitoring_data(_PyCoMonitoringData *data) } static void -code_dealloc(PyCodeObject *co) +code_dealloc(PyObject *self) { - _PyObject_ResurrectStart((PyObject *)co); + PyCodeObject *co = _PyCodeObject_CAST(self); + _PyObject_ResurrectStart(self); notify_code_watchers(PY_CODE_EVENT_DESTROY, co); - if (_PyObject_ResurrectEnd((PyObject *)co)) { + if (_PyObject_ResurrectEnd(self)) { return; } @@ -1908,7 +1912,7 @@ code_dealloc(PyCodeObject *co) Py_XDECREF(co->co_linetable); Py_XDECREF(co->co_exceptiontable); #ifdef Py_GIL_DISABLED - assert(co->_co_unique_id == -1); + assert(co->_co_unique_id == _Py_INVALID_UNIQUE_ID); #endif if (co->_co_cached != NULL) { Py_XDECREF(co->_co_cached->_co_code); @@ -1918,7 +1922,7 @@ code_dealloc(PyCodeObject *co) PyMem_Free(co->_co_cached); } if (co->co_weakreflist != NULL) { - PyObject_ClearWeakRefs((PyObject*)co); + PyObject_ClearWeakRefs(self); } free_monitoring_data(co->_co_monitoring); #ifdef Py_GIL_DISABLED @@ -1939,7 +1943,7 @@ code_dealloc(PyCodeObject *co) static int code_traverse(PyObject *self, visitproc visit, void *arg) { - PyCodeObject *co = (PyCodeObject*)self; + PyCodeObject *co = _PyCodeObject_CAST(self); Py_VISIT(co->co_consts); return 0; } @@ -1948,7 +1952,7 @@ code_traverse(PyObject *self, visitproc visit, void *arg) static PyObject * code_repr(PyObject *self) { - PyCodeObject *co = (PyCodeObject*)self; + PyCodeObject *co = _PyCodeObject_CAST(self); int lineno; if (co->co_firstlineno != 0) lineno = co->co_firstlineno; @@ -2057,7 +2061,7 @@ code_richcompare(PyObject *self, PyObject *other, int op) static Py_hash_t code_hash(PyObject *self) { - PyCodeObject *co = (PyCodeObject*)self; + PyCodeObject *co = _PyCodeObject_CAST(self); Py_uhash_t uhash = 20221211; #define SCRAMBLE_IN(H) do { \ uhash ^= (Py_uhash_t)(H); \ @@ -2120,7 +2124,7 @@ static PyMemberDef code_memberlist[] = { static PyObject * code_getlnotab(PyObject *self, void *closure) { - PyCodeObject *code = (PyCodeObject*)self; + PyCodeObject *code = _PyCodeObject_CAST(self); if (PyErr_WarnEx(PyExc_DeprecationWarning, "co_lnotab is deprecated, use co_lines instead.", 1) < 0) { @@ -2132,28 +2136,28 @@ code_getlnotab(PyObject *self, void *closure) static PyObject * code_getvarnames(PyObject *self, void *closure) { - PyCodeObject *code = (PyCodeObject*)self; + PyCodeObject *code = _PyCodeObject_CAST(self); return _PyCode_GetVarnames(code); } static PyObject * code_getcellvars(PyObject *self, void *closure) { - PyCodeObject *code = (PyCodeObject*)self; + PyCodeObject *code = _PyCodeObject_CAST(self); return _PyCode_GetCellvars(code); } static PyObject * code_getfreevars(PyObject *self, void *closure) { - PyCodeObject *code = (PyCodeObject*)self; + PyCodeObject *code = _PyCodeObject_CAST(self); return _PyCode_GetFreevars(code); } static PyObject * code_getcodeadaptive(PyObject *self, void *closure) { - PyCodeObject *code = (PyCodeObject*)self; + PyCodeObject *code = _PyCodeObject_CAST(self); return PyBytes_FromStringAndSize(code->co_code_adaptive, _PyCode_NBYTES(code)); } @@ -2161,7 +2165,7 @@ 
code_getcodeadaptive(PyObject *self, void *closure) static PyObject * code_getcode(PyObject *self, void *closure) { - PyCodeObject *code = (PyCodeObject*)self; + PyCodeObject *code = _PyCodeObject_CAST(self); return _PyCode_GetCode(code); } @@ -2180,7 +2184,7 @@ static PyGetSetDef code_getsetlist[] = { static PyObject * code_sizeof(PyObject *self, PyObject *Py_UNUSED(args)) { - PyCodeObject *co = (PyCodeObject*)self; + PyCodeObject *co = _PyCodeObject_CAST(self); size_t res = _PyObject_VAR_SIZE(Py_TYPE(co), Py_SIZE(co)); _PyCodeObjectExtra *co_extra = (_PyCodeObjectExtra*) co->co_extra; if (co_extra != NULL) { @@ -2193,13 +2197,14 @@ code_sizeof(PyObject *self, PyObject *Py_UNUSED(args)) static PyObject * code_linesiterator(PyObject *self, PyObject *Py_UNUSED(args)) { - PyCodeObject *code = (PyCodeObject*)self; + PyCodeObject *code = _PyCodeObject_CAST(self); return (PyObject *)new_linesiterator(code); } static PyObject * -code_branchesiterator(PyCodeObject *code, PyObject *Py_UNUSED(args)) +code_branchesiterator(PyObject *self, PyObject *Py_UNUSED(args)) { + PyCodeObject *code = _PyCodeObject_CAST(self); return _PyInstrumentation_BranchesIterator(code); } @@ -2208,24 +2213,24 @@ code_branchesiterator(PyCodeObject *code, PyObject *Py_UNUSED(args)) code.replace * - co_argcount: int(c_default="self->co_argcount") = unchanged - co_posonlyargcount: int(c_default="self->co_posonlyargcount") = unchanged - co_kwonlyargcount: int(c_default="self->co_kwonlyargcount") = unchanged - co_nlocals: int(c_default="self->co_nlocals") = unchanged - co_stacksize: int(c_default="self->co_stacksize") = unchanged - co_flags: int(c_default="self->co_flags") = unchanged - co_firstlineno: int(c_default="self->co_firstlineno") = unchanged + co_argcount: int(c_default="((PyCodeObject *)self)->co_argcount") = unchanged + co_posonlyargcount: int(c_default="((PyCodeObject *)self)->co_posonlyargcount") = unchanged + co_kwonlyargcount: int(c_default="((PyCodeObject *)self)->co_kwonlyargcount") = unchanged + co_nlocals: int(c_default="((PyCodeObject *)self)->co_nlocals") = unchanged + co_stacksize: int(c_default="((PyCodeObject *)self)->co_stacksize") = unchanged + co_flags: int(c_default="((PyCodeObject *)self)->co_flags") = unchanged + co_firstlineno: int(c_default="((PyCodeObject *)self)->co_firstlineno") = unchanged co_code: object(subclass_of="&PyBytes_Type", c_default="NULL") = unchanged - co_consts: object(subclass_of="&PyTuple_Type", c_default="self->co_consts") = unchanged - co_names: object(subclass_of="&PyTuple_Type", c_default="self->co_names") = unchanged + co_consts: object(subclass_of="&PyTuple_Type", c_default="((PyCodeObject *)self)->co_consts") = unchanged + co_names: object(subclass_of="&PyTuple_Type", c_default="((PyCodeObject *)self)->co_names") = unchanged co_varnames: object(subclass_of="&PyTuple_Type", c_default="NULL") = unchanged co_freevars: object(subclass_of="&PyTuple_Type", c_default="NULL") = unchanged co_cellvars: object(subclass_of="&PyTuple_Type", c_default="NULL") = unchanged - co_filename: unicode(c_default="self->co_filename") = unchanged - co_name: unicode(c_default="self->co_name") = unchanged - co_qualname: unicode(c_default="self->co_qualname") = unchanged - co_linetable: object(subclass_of="&PyBytes_Type", c_default="self->co_linetable") = unchanged - co_exceptiontable: object(subclass_of="&PyBytes_Type", c_default="self->co_exceptiontable") = unchanged + co_filename: unicode(c_default="((PyCodeObject *)self)->co_filename") = unchanged + co_name: unicode(c_default="((PyCodeObject 
*)self)->co_name") = unchanged + co_qualname: unicode(c_default="((PyCodeObject *)self)->co_qualname") = unchanged + co_linetable: object(subclass_of="&PyBytes_Type", c_default="((PyCodeObject *)self)->co_linetable") = unchanged + co_exceptiontable: object(subclass_of="&PyBytes_Type", c_default="((PyCodeObject *)self)->co_exceptiontable") = unchanged Return a copy of the code object with new values for the specified fields. [clinic start generated code]*/ @@ -2240,7 +2245,7 @@ code_replace_impl(PyCodeObject *self, int co_argcount, PyObject *co_filename, PyObject *co_name, PyObject *co_qualname, PyObject *co_linetable, PyObject *co_exceptiontable) -/*[clinic end generated code: output=e75c48a15def18b9 input=18e280e07846c122]*/ +/*[clinic end generated code: output=e75c48a15def18b9 input=a455a89c57ac9d42]*/ { #define CHECK_INT_ARG(ARG) \ if (ARG < 0) { \ @@ -2343,7 +2348,7 @@ code__varname_from_oparg_impl(PyCodeObject *self, int oparg) static struct PyMethodDef code_methods[] = { {"__sizeof__", code_sizeof, METH_NOARGS}, {"co_lines", code_linesiterator, METH_NOARGS}, - {"co_branches", (PyCFunction)code_branchesiterator, METH_NOARGS}, + {"co_branches", code_branchesiterator, METH_NOARGS}, {"co_positions", code_positionsiterator, METH_NOARGS}, CODE_REPLACE_METHODDEF CODE__VARNAME_FROM_OPARG_METHODDEF @@ -2358,7 +2363,7 @@ PyTypeObject PyCode_Type = { "code", offsetof(PyCodeObject, co_code_adaptive), sizeof(_Py_CODEUNIT), - (destructor)code_dealloc, /* tp_dealloc */ + code_dealloc, /* tp_dealloc */ 0, /* tp_vectorcall_offset */ 0, /* tp_getattr */ 0, /* tp_setattr */ @@ -2812,7 +2817,7 @@ copy_code(_Py_CODEUNIT *dst, PyCodeObject *co) for (int i = 0; i < code_len; i += _PyInstruction_GetLength(co, i)) { dst[i] = _Py_GetBaseCodeUnit(co, i); } - _PyCode_Quicken(dst, code_len, co->co_consts, 1); + _PyCode_Quicken(dst, code_len, 1); } static Py_ssize_t diff --git a/Objects/complexobject.c b/Objects/complexobject.c index bf6187efac941f..5d9b3c9f0e3e76 100644 --- a/Objects/complexobject.c +++ b/Objects/complexobject.c @@ -14,6 +14,8 @@ #include "pycore_pymath.h" // _Py_ADJUST_ERANGE2() +#define _PyComplexObject_CAST(op) ((PyComplexObject *)(op)) + /*[clinic input] class complex "PyComplexObject *" "&PyComplex_Type" @@ -553,11 +555,12 @@ PyComplex_AsCComplex(PyObject *op) } static PyObject * -complex_repr(PyComplexObject *v) +complex_repr(PyObject *op) { int precision = 0; char format_code = 'r'; PyObject *result = NULL; + PyComplexObject *v = _PyComplexObject_CAST(op); /* If these are non-NULL, they'll need to be freed. 
*/ char *pre = NULL; @@ -609,13 +612,14 @@ complex_repr(PyComplexObject *v) } static Py_hash_t -complex_hash(PyComplexObject *v) +complex_hash(PyObject *op) { Py_uhash_t hashreal, hashimag, combined; - hashreal = (Py_uhash_t)_Py_HashDouble((PyObject *) v, v->cval.real); + PyComplexObject *v = _PyComplexObject_CAST(op); + hashreal = (Py_uhash_t)_Py_HashDouble(op, v->cval.real); if (hashreal == (Py_uhash_t)-1) return -1; - hashimag = (Py_uhash_t)_Py_HashDouble((PyObject *)v, v->cval.imag); + hashimag = (Py_uhash_t)_Py_HashDouble(op, v->cval.imag); if (hashimag == (Py_uhash_t)-1) return -1; /* Note: if the imaginary part is 0, hashimag is 0 now, @@ -753,8 +757,9 @@ complex_pow(PyObject *v, PyObject *w, PyObject *z) } static PyObject * -complex_neg(PyComplexObject *v) +complex_neg(PyObject *op) { + PyComplexObject *v = _PyComplexObject_CAST(op); Py_complex neg; neg.real = -v->cval.real; neg.imag = -v->cval.imag; @@ -762,22 +767,20 @@ complex_neg(PyComplexObject *v) } static PyObject * -complex_pos(PyComplexObject *v) +complex_pos(PyObject *op) { + PyComplexObject *v = _PyComplexObject_CAST(op); if (PyComplex_CheckExact(v)) { return Py_NewRef(v); } - else - return PyComplex_FromCComplex(v->cval); + return PyComplex_FromCComplex(v->cval); } static PyObject * -complex_abs(PyComplexObject *v) +complex_abs(PyObject *op) { - double result; - - result = _Py_c_abs(v->cval); - + PyComplexObject *v = _PyComplexObject_CAST(op); + double result = _Py_c_abs(v->cval); if (errno == ERANGE) { PyErr_SetString(PyExc_OverflowError, "absolute value too large"); @@ -787,8 +790,9 @@ complex_abs(PyComplexObject *v) } static int -complex_bool(PyComplexObject *v) +complex_bool(PyObject *op) { + PyComplexObject *v = _PyComplexObject_CAST(op); return v->cval.real != 0.0 || v->cval.imag != 0.0; } @@ -1339,16 +1343,16 @@ static PyMemberDef complex_members[] = { }; static PyNumberMethods complex_as_number = { - (binaryfunc)complex_add, /* nb_add */ - (binaryfunc)complex_sub, /* nb_subtract */ - (binaryfunc)complex_mul, /* nb_multiply */ + complex_add, /* nb_add */ + complex_sub, /* nb_subtract */ + complex_mul, /* nb_multiply */ 0, /* nb_remainder */ 0, /* nb_divmod */ - (ternaryfunc)complex_pow, /* nb_power */ - (unaryfunc)complex_neg, /* nb_negative */ - (unaryfunc)complex_pos, /* nb_positive */ - (unaryfunc)complex_abs, /* nb_absolute */ - (inquiry)complex_bool, /* nb_bool */ + complex_pow, /* nb_power */ + complex_neg, /* nb_negative */ + complex_pos, /* nb_positive */ + complex_abs, /* nb_absolute */ + complex_bool, /* nb_bool */ 0, /* nb_invert */ 0, /* nb_lshift */ 0, /* nb_rshift */ @@ -1369,7 +1373,7 @@ static PyNumberMethods complex_as_number = { 0, /* nb_inplace_xor */ 0, /* nb_inplace_or */ 0, /* nb_floor_divide */ - (binaryfunc)complex_div, /* nb_true_divide */ + complex_div, /* nb_true_divide */ 0, /* nb_inplace_floor_divide */ 0, /* nb_inplace_true_divide */ }; @@ -1384,11 +1388,11 @@ PyTypeObject PyComplex_Type = { 0, /* tp_getattr */ 0, /* tp_setattr */ 0, /* tp_as_async */ - (reprfunc)complex_repr, /* tp_repr */ + complex_repr, /* tp_repr */ &complex_as_number, /* tp_as_number */ 0, /* tp_as_sequence */ 0, /* tp_as_mapping */ - (hashfunc)complex_hash, /* tp_hash */ + complex_hash, /* tp_hash */ 0, /* tp_call */ 0, /* tp_str */ PyObject_GenericGetAttr, /* tp_getattro */ diff --git a/Objects/descrobject.c b/Objects/descrobject.c index 4eccd1704eb95a..238becee241d1d 100644 --- a/Objects/descrobject.c +++ b/Objects/descrobject.c @@ -1349,7 +1349,7 @@ wrapper_hash(PyObject *self) wrapperobject *wp = 
(wrapperobject *)self; Py_hash_t x, y; x = PyObject_GenericHash(wp->self); - y = _Py_HashPointer(wp->descr); + y = Py_HashPointer(wp->descr); x = x ^ y; if (x == -1) x = -2; @@ -1508,6 +1508,8 @@ PyWrapper_New(PyObject *d, PyObject *self) /* A built-in 'property' type */ +#define _propertyobject_CAST(op) ((propertyobject *)(op)) + /* class property(object): @@ -1911,8 +1913,9 @@ property_init_impl(propertyobject *self, PyObject *fget, PyObject *fset, } static PyObject * -property_get__name__(propertyobject *prop, void *Py_UNUSED(ignored)) +property_get__name__(PyObject *op, void *Py_UNUSED(ignored)) { + propertyobject *prop = _propertyobject_CAST(op); PyObject *name; if (property_name(prop, &name) < 0) { return NULL; @@ -1925,16 +1928,17 @@ property_get__name__(propertyobject *prop, void *Py_UNUSED(ignored)) } static int -property_set__name__(propertyobject *prop, PyObject *value, - void *Py_UNUSED(ignored)) +property_set__name__(PyObject *op, PyObject *value, void *Py_UNUSED(ignored)) { + propertyobject *prop = _propertyobject_CAST(op); Py_XSETREF(prop->prop_name, Py_XNewRef(value)); return 0; } static PyObject * -property_get___isabstractmethod__(propertyobject *prop, void *closure) +property_get___isabstractmethod__(PyObject *op, void *closure) { + propertyobject *prop = _propertyobject_CAST(op); int res = _PyObject_IsAbstract(prop->prop_get); if (res == -1) { return NULL; @@ -1962,9 +1966,8 @@ property_get___isabstractmethod__(propertyobject *prop, void *closure) } static PyGetSetDef property_getsetlist[] = { - {"__name__", (getter)property_get__name__, (setter)property_set__name__}, - {"__isabstractmethod__", - (getter)property_get___isabstractmethod__, NULL, + {"__name__", property_get__name__, property_set__name__, NULL, NULL}, + {"__isabstractmethod__", property_get___isabstractmethod__, NULL, NULL, NULL}, {NULL} /* Sentinel */ diff --git a/Objects/dictobject.c b/Objects/dictobject.c index 2a054c3f2ae0ff..91cf013a1dc24b 100644 --- a/Objects/dictobject.c +++ b/Objects/dictobject.c @@ -1129,6 +1129,24 @@ dictkeys_generic_lookup(PyDictObject *mp, PyDictKeysObject* dk, PyObject *key, P return do_lookup(mp, dk, key, hash, compare_generic); } +static bool +check_keys_unicode(PyDictKeysObject *dk, PyObject *key) +{ + return PyUnicode_CheckExact(key) && (dk->dk_kind != DICT_KEYS_GENERAL); +} + +static Py_ssize_t +hash_unicode_key(PyObject *key) +{ + assert(PyUnicode_CheckExact(key)); + Py_hash_t hash = unicode_get_hash(key); + if (hash == -1) { + hash = PyUnicode_Type.tp_hash(key); + assert(hash != -1); + } + return hash; +} + #ifdef Py_GIL_DISABLED static Py_ssize_t unicodekeys_lookup_unicode_threadsafe(PyDictKeysObject* dk, PyObject *key, @@ -1167,21 +1185,28 @@ unicodekeys_lookup_split(PyDictKeysObject* dk, PyObject *key, Py_hash_t hash) Py_ssize_t _PyDictKeys_StringLookup(PyDictKeysObject* dk, PyObject *key) { - DictKeysKind kind = dk->dk_kind; - if (!PyUnicode_CheckExact(key) || kind == DICT_KEYS_GENERAL) { + if (!check_keys_unicode(dk, key)) { return DKIX_ERROR; } - Py_hash_t hash = unicode_get_hash(key); - if (hash == -1) { - hash = PyUnicode_Type.tp_hash(key); - if (hash == -1) { - PyErr_Clear(); - return DKIX_ERROR; - } - } + Py_hash_t hash = hash_unicode_key(key); return unicodekeys_lookup_unicode(dk, key, hash); } +Py_ssize_t +_PyDictKeys_StringLookupAndVersion(PyDictKeysObject *dk, PyObject *key, uint32_t *version) +{ + if (!check_keys_unicode(dk, key)) { + return DKIX_ERROR; + } + Py_ssize_t ix; + Py_hash_t hash = hash_unicode_key(key); + LOCK_KEYS(dk); + ix = 
unicodekeys_lookup_unicode(dk, key, hash); + *version = _PyDictKeys_GetVersionForCurrentState(_PyInterpreterState_GET(), dk); + UNLOCK_KEYS(dk); + return ix; +} + /* Like _PyDictKeys_StringLookup() but only works on split keys. Note * that in free-threaded builds this locks the keys object as required. */ @@ -1634,6 +1659,9 @@ _PyDict_EnablePerThreadRefcounting(PyObject *op) assert(PyDict_Check(op)); #ifdef Py_GIL_DISABLED Py_ssize_t id = _PyObject_AssignUniqueId(op); + if (id == _Py_INVALID_UNIQUE_ID) { + return; + } if ((uint64_t)id >= (uint64_t)DICT_UNIQUE_ID_MAX) { _PyObject_ReleaseUniqueId(id); return; @@ -1641,8 +1669,7 @@ _PyDict_EnablePerThreadRefcounting(PyObject *op) PyDictObject *mp = (PyDictObject *)op; assert((mp->_ma_watcher_tag >> DICT_UNIQUE_ID_SHIFT) == 0); - // Plus 1 so that _ma_watcher_tag=0 represents an unassigned id - mp->_ma_watcher_tag += ((uint64_t)id + 1) << DICT_UNIQUE_ID_SHIFT; + mp->_ma_watcher_tag += (uint64_t)id << DICT_UNIQUE_ID_SHIFT; #endif } @@ -1926,6 +1953,16 @@ build_indices_unicode(PyDictKeysObject *keys, PyDictUnicodeEntry *ep, Py_ssize_t } } +static void +invalidate_and_clear_inline_values(PyDictValues *values) +{ + assert(values->embedded); + FT_ATOMIC_STORE_UINT8(values->valid, 0); + for (int i = 0; i < values->capacity; i++) { + FT_ATOMIC_STORE_PTR_RELEASE(values->values[i], NULL); + } +} + /* Restructure the table by allocating a new table and reinserting all items again. When entries have been deleted, the new table may @@ -2017,7 +2054,7 @@ dictresize(PyInterpreterState *interp, PyDictObject *mp, if (oldvalues->embedded) { assert(oldvalues->embedded == 1); assert(oldvalues->valid == 1); - FT_ATOMIC_STORE_UINT8(oldvalues->valid, 0); + invalidate_and_clear_inline_values(oldvalues); } else { free_values(oldvalues, IS_DICT_SHARED(mp)); @@ -3053,8 +3090,8 @@ PyDict_PopString(PyObject *op, const char *key, PyObject **result) } -PyObject * -_PyDict_Pop(PyObject *dict, PyObject *key, PyObject *default_value) +static PyObject * +dict_pop_default(PyObject *dict, PyObject *key, PyObject *default_value) { PyObject *result; if (PyDict_Pop(dict, key, &result) == 0) { @@ -3067,6 +3104,12 @@ _PyDict_Pop(PyObject *dict, PyObject *key, PyObject *default_value) return result; } +PyObject * +_PyDict_Pop(PyObject *dict, PyObject *key, PyObject *default_value) +{ + return dict_pop_default(dict, key, default_value); +} + static PyDictObject * dict_dict_fromkeys(PyInterpreterState *interp, PyDictObject *mp, PyObject *iterable, PyObject *value) @@ -4205,7 +4248,6 @@ dict___contains__(PyDictObject *self, PyObject *key) } /*[clinic input] -@critical_section dict.get key: object @@ -4217,7 +4259,7 @@ Return the value for key if key is in the dictionary, else default. 
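/*
 * Illustrative aside, not part of the patch: dict_pop_default() above is a
 * thin wrapper over the public PyDict_Pop(), which returns 1 when the key
 * was found, 0 when it was missing and -1 on error. A minimal sketch of the
 * same pattern for code outside the interpreter core (the helper name is
 * hypothetical and 'default_value' is assumed to be non-NULL):
 */
static PyObject *
pop_with_default(PyObject *dict, PyObject *key, PyObject *default_value)
{
    PyObject *result;
    int found = PyDict_Pop(dict, key, &result);
    if (found < 0) {
        return NULL;                    /* hashing or lookup failed */
    }
    if (found == 0) {
        /* key absent: hand back a new reference to the caller's default */
        return Py_NewRef(default_value);
    }
    return result;                      /* new reference to the popped value */
}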
static PyObject * dict_get_impl(PyDictObject *self, PyObject *key, PyObject *default_value) -/*[clinic end generated code: output=bba707729dee05bf input=a631d3f18f584c60]*/ +/*[clinic end generated code: output=bba707729dee05bf input=279ddb5790b6b107]*/ { PyObject *val = NULL; Py_hash_t hash; @@ -4428,7 +4470,7 @@ static PyObject * dict_pop_impl(PyDictObject *self, PyObject *key, PyObject *default_value) /*[clinic end generated code: output=3abb47b89f24c21c input=e221baa01044c44c]*/ { - return _PyDict_Pop((PyObject*)self, key, default_value); + return dict_pop_default((PyObject*)self, key, default_value); } /*[clinic input] @@ -7007,7 +7049,13 @@ _PyObject_TryGetInstanceAttribute(PyObject *obj, PyObject *name, PyObject **attr #ifdef Py_GIL_DISABLED PyObject *value = _Py_atomic_load_ptr_acquire(&values->values[ix]); - if (value == NULL || _Py_TryIncrefCompare(&values->values[ix], value)) { + if (value == NULL) { + if (FT_ATOMIC_LOAD_UINT8(values->valid)) { + *attr = NULL; + return true; + } + } + else if (_Py_TryIncrefCompare(&values->values[ix], value)) { *attr = value; return true; } @@ -7303,7 +7351,8 @@ PyObject_ClearManagedDict(PyObject *obj) if (set_or_clear_managed_dict(obj, NULL, true) < 0) { /* Must be out of memory */ assert(PyErr_Occurred() == PyExc_MemoryError); - PyErr_WriteUnraisable(NULL); + PyErr_FormatUnraisable("Exception ignored while " + "clearing an object managed dict"); /* Clear the dict */ PyDictObject *dict = _PyObject_GetManagedDict(obj); Py_BEGIN_CRITICAL_SECTION2(dict, obj); @@ -7345,7 +7394,7 @@ _PyDict_DetachFromObject(PyDictObject *mp, PyObject *obj) } mp->ma_values = values; - FT_ATOMIC_STORE_UINT8(_PyObject_InlineValues(obj)->valid, 0); + invalidate_and_clear_inline_values(_PyObject_InlineValues(obj)); assert(_PyObject_InlineValuesConsistencyCheck(obj)); ASSERT_CONSISTENT(mp); diff --git a/Objects/exceptions.c b/Objects/exceptions.c index 714f8c828afbc1..ea2733435fc3ec 100644 --- a/Objects/exceptions.c +++ b/Objects/exceptions.c @@ -16,6 +16,14 @@ #include "osdefs.h" // SEP +#include "clinic/exceptions.c.h" + +/*[clinic input] +class BaseException "PyBaseExceptionObject *" "&PyExc_BaseException" +class BaseExceptionGroup "PyBaseExceptionGroupObject *" "&PyExc_BaseExceptionGroup" +[clinic start generated code]*/ +/*[clinic end generated code: output=da39a3ee5e6b4b0d input=b7c45e78cff8edc3]*/ + /* Compatibility aliases */ PyObject *PyExc_EnvironmentError = NULL; // borrowed ref @@ -152,30 +160,50 @@ BaseException_traverse(PyBaseExceptionObject *self, visitproc visit, void *arg) static PyObject * BaseException_str(PyBaseExceptionObject *self) { + PyObject *res; + Py_BEGIN_CRITICAL_SECTION(self); switch (PyTuple_GET_SIZE(self->args)) { case 0: - return Py_GetConstant(Py_CONSTANT_EMPTY_STR); + res = Py_GetConstant(Py_CONSTANT_EMPTY_STR); + break; case 1: - return PyObject_Str(PyTuple_GET_ITEM(self->args, 0)); + res = PyObject_Str(PyTuple_GET_ITEM(self->args, 0)); + break; default: - return PyObject_Str(self->args); + res = PyObject_Str(self->args); + break; } + Py_END_CRITICAL_SECTION(); + return res; } static PyObject * BaseException_repr(PyBaseExceptionObject *self) { + PyObject *res; + Py_BEGIN_CRITICAL_SECTION(self); const char *name = _PyType_Name(Py_TYPE(self)); - if (PyTuple_GET_SIZE(self->args) == 1) - return PyUnicode_FromFormat("%s(%R)", name, + if (PyTuple_GET_SIZE(self->args) == 1) { + res = PyUnicode_FromFormat("%s(%R)", name, PyTuple_GET_ITEM(self->args, 0)); - else - return PyUnicode_FromFormat("%s%R", name, self->args); + } + else { + res = 
PyUnicode_FromFormat("%s%R", name, self->args); + } + Py_END_CRITICAL_SECTION(); + return res; } /* Pickling support */ + +/*[clinic input] +@critical_section +BaseException.__reduce__ +[clinic start generated code]*/ + static PyObject * -BaseException_reduce(PyBaseExceptionObject *self, PyObject *Py_UNUSED(ignored)) +BaseException___reduce___impl(PyBaseExceptionObject *self) +/*[clinic end generated code: output=af87c1247ef98748 input=283be5a10d9c964f]*/ { if (self->args && self->dict) return PyTuple_Pack(3, Py_TYPE(self), self->args, self->dict); @@ -188,8 +216,17 @@ BaseException_reduce(PyBaseExceptionObject *self, PyObject *Py_UNUSED(ignored)) * all their attributes in the __dict__. Code is taken from cPickle's * load_build function. */ + +/*[clinic input] +@critical_section +BaseException.__setstate__ + state: object + / +[clinic start generated code]*/ + static PyObject * -BaseException_setstate(PyObject *self, PyObject *state) +BaseException___setstate___impl(PyBaseExceptionObject *self, PyObject *state) +/*[clinic end generated code: output=f3834889950453ab input=5524b61cfe9b9856]*/ { PyObject *d_key, *d_value; Py_ssize_t i = 0; @@ -202,7 +239,7 @@ BaseException_setstate(PyObject *self, PyObject *state) while (PyDict_Next(state, &i, &d_key, &d_value)) { Py_INCREF(d_key); Py_INCREF(d_value); - int res = PyObject_SetAttr(self, d_key, d_value); + int res = PyObject_SetAttr((PyObject *)self, d_key, d_value); Py_DECREF(d_value); Py_DECREF(d_key); if (res < 0) { @@ -213,18 +250,26 @@ BaseException_setstate(PyObject *self, PyObject *state) Py_RETURN_NONE; } + +/*[clinic input] +@critical_section +BaseException.with_traceback + tb: object + / + +Set self.__traceback__ to tb and return self. +[clinic start generated code]*/ + static PyObject * -BaseException_with_traceback(PyObject *self, PyObject *tb) { - if (PyException_SetTraceback(self, tb)) +BaseException_with_traceback_impl(PyBaseExceptionObject *self, PyObject *tb) +/*[clinic end generated code: output=81e92f2387927f10 input=b5fb64d834717e36]*/ +{ + if (BaseException___traceback___set_impl(self, tb) < 0){ return NULL; - + } return Py_NewRef(self); } -PyDoc_STRVAR(with_traceback_doc, -"Exception.with_traceback(tb) --\n\ - set self.__traceback__ to tb and return self."); - static inline PyBaseExceptionObject* _PyBaseExceptionObject_cast(PyObject *exc) { @@ -232,18 +277,21 @@ _PyBaseExceptionObject_cast(PyObject *exc) return (PyBaseExceptionObject *)exc; } +/*[clinic input] +@critical_section +BaseException.add_note + note: object(subclass_of="&PyUnicode_Type") + / + +Add a note to the exception +[clinic start generated code]*/ + static PyObject * -BaseException_add_note(PyObject *self, PyObject *note) +BaseException_add_note_impl(PyBaseExceptionObject *self, PyObject *note) +/*[clinic end generated code: output=fb7cbcba611c187b input=e60a6b6e9596acaf]*/ { - if (!PyUnicode_Check(note)) { - PyErr_Format(PyExc_TypeError, - "note must be a str, not '%s'", - Py_TYPE(note)->tp_name); - return NULL; - } - PyObject *notes; - if (PyObject_GetOptionalAttr(self, &_Py_ID(__notes__), ¬es) < 0) { + if (PyObject_GetOptionalAttr((PyObject *)self, &_Py_ID(__notes__), ¬es) < 0) { return NULL; } if (notes == NULL) { @@ -251,7 +299,7 @@ BaseException_add_note(PyObject *self, PyObject *note) if (notes == NULL) { return NULL; } - if (PyObject_SetAttr(self, &_Py_ID(__notes__), notes) < 0) { + if (PyObject_SetAttr((PyObject *)self, &_Py_ID(__notes__), notes) < 0) { Py_DECREF(notes); return NULL; } @@ -269,22 +317,23 @@ BaseException_add_note(PyObject *self, 
PyObject *note) Py_RETURN_NONE; } -PyDoc_STRVAR(add_note_doc, -"Exception.add_note(note) --\n\ - add a note to the exception"); - static PyMethodDef BaseException_methods[] = { - {"__reduce__", (PyCFunction)BaseException_reduce, METH_NOARGS }, - {"__setstate__", (PyCFunction)BaseException_setstate, METH_O }, - {"with_traceback", (PyCFunction)BaseException_with_traceback, METH_O, - with_traceback_doc}, - {"add_note", (PyCFunction)BaseException_add_note, METH_O, - add_note_doc}, - {NULL, NULL, 0, NULL}, + BASEEXCEPTION___REDUCE___METHODDEF + BASEEXCEPTION___SETSTATE___METHODDEF + BASEEXCEPTION_WITH_TRACEBACK_METHODDEF + BASEEXCEPTION_ADD_NOTE_METHODDEF + {NULL, NULL, 0, NULL}, }; +/*[clinic input] +@critical_section +@getter +BaseException.args +[clinic start generated code]*/ + static PyObject * -BaseException_get_args(PyBaseExceptionObject *self, void *Py_UNUSED(ignored)) +BaseException_args_get_impl(PyBaseExceptionObject *self) +/*[clinic end generated code: output=e02e34e35cf4d677 input=64282386e4d7822d]*/ { if (self->args == NULL) { Py_RETURN_NONE; @@ -292,23 +341,37 @@ BaseException_get_args(PyBaseExceptionObject *self, void *Py_UNUSED(ignored)) return Py_NewRef(self->args); } +/*[clinic input] +@critical_section +@setter +BaseException.args +[clinic start generated code]*/ + static int -BaseException_set_args(PyBaseExceptionObject *self, PyObject *val, void *Py_UNUSED(ignored)) +BaseException_args_set_impl(PyBaseExceptionObject *self, PyObject *value) +/*[clinic end generated code: output=331137e11d8f9e80 input=2400047ea5970a84]*/ { PyObject *seq; - if (val == NULL) { + if (value == NULL) { PyErr_SetString(PyExc_TypeError, "args may not be deleted"); return -1; } - seq = PySequence_Tuple(val); + seq = PySequence_Tuple(value); if (!seq) return -1; Py_XSETREF(self->args, seq); return 0; } +/*[clinic input] +@critical_section +@getter +BaseException.__traceback__ +[clinic start generated code]*/ + static PyObject * -BaseException_get_tb(PyBaseExceptionObject *self, void *Py_UNUSED(ignored)) +BaseException___traceback___get_impl(PyBaseExceptionObject *self) +/*[clinic end generated code: output=17cf874a52339398 input=a2277f0de62170cf]*/ { if (self->traceback == NULL) { Py_RETURN_NONE; @@ -316,17 +379,26 @@ BaseException_get_tb(PyBaseExceptionObject *self, void *Py_UNUSED(ignored)) return Py_NewRef(self->traceback); } + +/*[clinic input] +@critical_section +@setter +BaseException.__traceback__ +[clinic start generated code]*/ + static int -BaseException_set_tb(PyBaseExceptionObject *self, PyObject *tb, void *Py_UNUSED(ignored)) +BaseException___traceback___set_impl(PyBaseExceptionObject *self, + PyObject *value) +/*[clinic end generated code: output=a82c86d9f29f48f0 input=12676035676badad]*/ { - if (tb == NULL) { + if (value == NULL) { PyErr_SetString(PyExc_TypeError, "__traceback__ may not be deleted"); return -1; } - if (PyTraceBack_Check(tb)) { - Py_XSETREF(self->traceback, Py_NewRef(tb)); + if (PyTraceBack_Check(value)) { + Py_XSETREF(self->traceback, Py_NewRef(value)); } - else if (tb == Py_None) { + else if (value == Py_None) { Py_CLEAR(self->traceback); } else { @@ -337,73 +409,100 @@ BaseException_set_tb(PyBaseExceptionObject *self, PyObject *tb, void *Py_UNUSED( return 0; } +/*[clinic input] +@critical_section +@getter +BaseException.__context__ +[clinic start generated code]*/ + static PyObject * -BaseException_get_context(PyObject *self, void *Py_UNUSED(ignored)) +BaseException___context___get_impl(PyBaseExceptionObject *self) +/*[clinic end generated code: 
output=6ec5d296ce8d1c93 input=b2d22687937e66ab]*/ { - PyObject *res = PyException_GetContext(self); - if (res) - return res; /* new reference already returned above */ - Py_RETURN_NONE; + if (self->context == NULL) { + Py_RETURN_NONE; + } + return Py_NewRef(self->context); } +/*[clinic input] +@critical_section +@setter +BaseException.__context__ +[clinic start generated code]*/ + static int -BaseException_set_context(PyObject *self, PyObject *arg, void *Py_UNUSED(ignored)) +BaseException___context___set_impl(PyBaseExceptionObject *self, + PyObject *value) +/*[clinic end generated code: output=b4cb52dcca1da3bd input=c0971adf47fa1858]*/ { - if (arg == NULL) { + if (value == NULL) { PyErr_SetString(PyExc_TypeError, "__context__ may not be deleted"); return -1; - } else if (arg == Py_None) { - arg = NULL; - } else if (!PyExceptionInstance_Check(arg)) { + } else if (value == Py_None) { + value = NULL; + } else if (!PyExceptionInstance_Check(value)) { PyErr_SetString(PyExc_TypeError, "exception context must be None " "or derive from BaseException"); return -1; } else { - /* PyException_SetContext steals this reference */ - Py_INCREF(arg); + Py_INCREF(value); } - PyException_SetContext(self, arg); + Py_XSETREF(self->context, value); return 0; } +/*[clinic input] +@critical_section +@getter +BaseException.__cause__ +[clinic start generated code]*/ + static PyObject * -BaseException_get_cause(PyObject *self, void *Py_UNUSED(ignored)) +BaseException___cause___get_impl(PyBaseExceptionObject *self) +/*[clinic end generated code: output=987f6c4d8a0bdbab input=40e0eac427b6e602]*/ { - PyObject *res = PyException_GetCause(self); - if (res) - return res; /* new reference already returned above */ - Py_RETURN_NONE; + if (self->cause == NULL) { + Py_RETURN_NONE; + } + return Py_NewRef(self->cause); } +/*[clinic input] +@critical_section +@setter +BaseException.__cause__ +[clinic start generated code]*/ + static int -BaseException_set_cause(PyObject *self, PyObject *arg, void *Py_UNUSED(ignored)) +BaseException___cause___set_impl(PyBaseExceptionObject *self, + PyObject *value) +/*[clinic end generated code: output=6161315398aaf541 input=e1b403c0bde3f62a]*/ { - if (arg == NULL) { + if (value == NULL) { PyErr_SetString(PyExc_TypeError, "__cause__ may not be deleted"); return -1; - } else if (arg == Py_None) { - arg = NULL; - } else if (!PyExceptionInstance_Check(arg)) { + } else if (value == Py_None) { + value = NULL; + } else if (!PyExceptionInstance_Check(value)) { PyErr_SetString(PyExc_TypeError, "exception cause must be None " "or derive from BaseException"); return -1; } else { /* PyException_SetCause steals this reference */ - Py_INCREF(arg); + Py_INCREF(value); } - PyException_SetCause(self, arg); + PyException_SetCause((PyObject *)self, value); return 0; } static PyGetSetDef BaseException_getset[] = { {"__dict__", PyObject_GenericGetDict, PyObject_GenericSetDict}, - {"args", (getter)BaseException_get_args, (setter)BaseException_set_args}, - {"__traceback__", (getter)BaseException_get_tb, (setter)BaseException_set_tb}, - {"__context__", BaseException_get_context, - BaseException_set_context, PyDoc_STR("exception context")}, - {"__cause__", BaseException_get_cause, - BaseException_set_cause, PyDoc_STR("exception cause")}, + BASEEXCEPTION_ARGS_GETSETDEF + BASEEXCEPTION___TRACEBACK___GETSETDEF + BASEEXCEPTION___CONTEXT___GETSETDEF + BASEEXCEPTION___CAUSE___GETSETDEF {NULL}, }; @@ -411,59 +510,81 @@ static PyGetSetDef BaseException_getset[] = { PyObject * PyException_GetTraceback(PyObject *self) { - 
PyBaseExceptionObject *base_self = _PyBaseExceptionObject_cast(self); - return Py_XNewRef(base_self->traceback); + PyObject *traceback; + Py_BEGIN_CRITICAL_SECTION(self); + traceback = Py_XNewRef(_PyBaseExceptionObject_cast(self)->traceback); + Py_END_CRITICAL_SECTION(); + return traceback; } int PyException_SetTraceback(PyObject *self, PyObject *tb) { - return BaseException_set_tb(_PyBaseExceptionObject_cast(self), tb, NULL); + int res; + Py_BEGIN_CRITICAL_SECTION(self); + res = BaseException___traceback___set_impl(_PyBaseExceptionObject_cast(self), tb); + Py_END_CRITICAL_SECTION(); + return res; } PyObject * PyException_GetCause(PyObject *self) { - PyObject *cause = _PyBaseExceptionObject_cast(self)->cause; - return Py_XNewRef(cause); + PyObject *cause; + Py_BEGIN_CRITICAL_SECTION(self); + cause = Py_XNewRef(_PyBaseExceptionObject_cast(self)->cause); + Py_END_CRITICAL_SECTION(); + return cause; } /* Steals a reference to cause */ void PyException_SetCause(PyObject *self, PyObject *cause) { + Py_BEGIN_CRITICAL_SECTION(self); PyBaseExceptionObject *base_self = _PyBaseExceptionObject_cast(self); base_self->suppress_context = 1; Py_XSETREF(base_self->cause, cause); + Py_END_CRITICAL_SECTION(); } PyObject * PyException_GetContext(PyObject *self) { - PyObject *context = _PyBaseExceptionObject_cast(self)->context; - return Py_XNewRef(context); + PyObject *context; + Py_BEGIN_CRITICAL_SECTION(self); + context = Py_XNewRef(_PyBaseExceptionObject_cast(self)->context); + Py_END_CRITICAL_SECTION(); + return context; } /* Steals a reference to context */ void PyException_SetContext(PyObject *self, PyObject *context) { + Py_BEGIN_CRITICAL_SECTION(self); Py_XSETREF(_PyBaseExceptionObject_cast(self)->context, context); + Py_END_CRITICAL_SECTION(); } PyObject * PyException_GetArgs(PyObject *self) { - PyObject *args = _PyBaseExceptionObject_cast(self)->args; - return Py_NewRef(args); + PyObject *args; + Py_BEGIN_CRITICAL_SECTION(self); + args = Py_NewRef(_PyBaseExceptionObject_cast(self)->args); + Py_END_CRITICAL_SECTION(); + return args; } void PyException_SetArgs(PyObject *self, PyObject *args) { + Py_BEGIN_CRITICAL_SECTION(self); Py_INCREF(args); Py_XSETREF(_PyBaseExceptionObject_cast(self)->args, args); + Py_END_CRITICAL_SECTION(); } const char * @@ -914,10 +1035,18 @@ BaseExceptionGroup_str(PyBaseExceptionGroupObject *self) self->msg, num_excs, num_excs > 1 ? 
"s" : ""); } +/*[clinic input] +@critical_section +BaseExceptionGroup.derive + excs: object + / +[clinic start generated code]*/ + static PyObject * -BaseExceptionGroup_derive(PyObject *self_, PyObject *excs) +BaseExceptionGroup_derive_impl(PyBaseExceptionGroupObject *self, + PyObject *excs) +/*[clinic end generated code: output=4307564218dfbf06 input=f72009d38e98cec1]*/ { - PyBaseExceptionGroupObject *self = _PyBaseExceptionGroupObject_cast(self_); PyObject *init_args = PyTuple_Pack(2, self->msg, excs); if (!init_args) { return NULL; @@ -1210,8 +1339,17 @@ exceptiongroup_split_recursive(PyObject *exc, return retval; } +/*[clinic input] +@critical_section +BaseExceptionGroup.split + matcher_value: object + / +[clinic start generated code]*/ + static PyObject * -BaseExceptionGroup_split(PyObject *self, PyObject *matcher_value) +BaseExceptionGroup_split_impl(PyBaseExceptionGroupObject *self, + PyObject *matcher_value) +/*[clinic end generated code: output=d74db579da4df6e2 input=0c5cfbfed57e0052]*/ { _exceptiongroup_split_matcher_type matcher_type; if (get_matcher_type(matcher_value, &matcher_type) < 0) { @@ -1221,7 +1359,7 @@ BaseExceptionGroup_split(PyObject *self, PyObject *matcher_value) _exceptiongroup_split_result split_result; bool construct_rest = true; if (exceptiongroup_split_recursive( - self, matcher_type, matcher_value, + (PyObject *)self, matcher_type, matcher_value, construct_rest, &split_result) < 0) { return NULL; } @@ -1236,8 +1374,17 @@ BaseExceptionGroup_split(PyObject *self, PyObject *matcher_value) return result; } +/*[clinic input] +@critical_section +BaseExceptionGroup.subgroup + matcher_value: object + / +[clinic start generated code]*/ + static PyObject * -BaseExceptionGroup_subgroup(PyObject *self, PyObject *matcher_value) +BaseExceptionGroup_subgroup_impl(PyBaseExceptionGroupObject *self, + PyObject *matcher_value) +/*[clinic end generated code: output=07dbec8f77d4dd8e input=988ffdd755a151ce]*/ { _exceptiongroup_split_matcher_type matcher_type; if (get_matcher_type(matcher_value, &matcher_type) < 0) { @@ -1247,7 +1394,7 @@ BaseExceptionGroup_subgroup(PyObject *self, PyObject *matcher_value) _exceptiongroup_split_result split_result; bool construct_rest = false; if (exceptiongroup_split_recursive( - self, matcher_type, matcher_value, + (PyObject *)self, matcher_type, matcher_value, construct_rest, &split_result) < 0) { return NULL; } @@ -1513,9 +1660,9 @@ static PyMemberDef BaseExceptionGroup_members[] = { static PyMethodDef BaseExceptionGroup_methods[] = { {"__class_getitem__", (PyCFunction)Py_GenericAlias, METH_O|METH_CLASS, PyDoc_STR("See PEP 585")}, - {"derive", (PyCFunction)BaseExceptionGroup_derive, METH_O}, - {"split", (PyCFunction)BaseExceptionGroup_split, METH_O}, - {"subgroup", (PyCFunction)BaseExceptionGroup_subgroup, METH_O}, + BASEEXCEPTIONGROUP_DERIVE_METHODDEF + BASEEXCEPTIONGROUP_SPLIT_METHODDEF + BASEEXCEPTIONGROUP_SUBGROUP_METHODDEF {NULL} }; @@ -2834,8 +2981,10 @@ unicode_error_set_end_impl(PyObject *self, Py_ssize_t end) * The 'start' can be negative or not, but when adjusting the value, * we clip it in [0, max(0, objlen - 1)] and do not interpret it as * a relative offset. + * + * This function always succeeds. */ -static inline Py_ssize_t +static Py_ssize_t unicode_error_adjust_start(Py_ssize_t start, Py_ssize_t objlen) { assert(objlen >= 0); @@ -2849,14 +2998,34 @@ unicode_error_adjust_start(Py_ssize_t start, Py_ssize_t objlen) } +/* Assert some properties of the adjusted 'start' value. 
*/ +#ifndef NDEBUG +static void +assert_adjusted_unicode_error_start(Py_ssize_t start, Py_ssize_t objlen) +{ + assert(objlen >= 0); + /* in the future, `min_start` may be something else */ + Py_ssize_t min_start = 0; + assert(start >= min_start); + /* in the future, `max_start` may be something else */ + Py_ssize_t max_start = Py_MAX(min_start, objlen - 1); + assert(start <= max_start); +} +#else +#define assert_adjusted_unicode_error_start(...) +#endif + + /* * Adjust the (exclusive) 'end' value of a UnicodeError object. * * The 'end' can be negative or not, but when adjusting the value, * we clip it in [min(1, objlen), max(min(1, objlen), objlen)] and * do not interpret it as a relative offset. + * + * This function always succeeds. */ -static inline Py_ssize_t +static Py_ssize_t unicode_error_adjust_end(Py_ssize_t end, Py_ssize_t objlen) { assert(objlen >= 0); @@ -2870,6 +3039,59 @@ unicode_error_adjust_end(Py_ssize_t end, Py_ssize_t objlen) } +/* Assert some properties of the adjusted 'end' value. */ +#ifndef NDEBUG +static void +assert_adjusted_unicode_error_end(Py_ssize_t end, Py_ssize_t objlen) +{ + assert(objlen >= 0); + /* in the future, `min_end` may be something else */ + Py_ssize_t min_end = Py_MIN(1, objlen); + assert(end >= min_end); + /* in the future, `max_end` may be something else */ + Py_ssize_t max_end = Py_MAX(min_end, objlen); + assert(end <= max_end); +} +#else +#define assert_adjusted_unicode_error_end(...) +#endif + + +/* + * Adjust the length of the range described by a UnicodeError object. + * + * The 'start' and 'end' arguments must have been obtained by + * unicode_error_adjust_start() and unicode_error_adjust_end(). + * + * The result is clipped in [0, objlen]. By construction, it + * will always be smaller than 'objlen' as 'start' and 'end' + * are smaller than 'objlen'. + */ +static Py_ssize_t +unicode_error_adjust_len(Py_ssize_t start, Py_ssize_t end, Py_ssize_t objlen) +{ + assert_adjusted_unicode_error_start(start, objlen); + assert_adjusted_unicode_error_end(end, objlen); + Py_ssize_t ranlen = end - start; + assert(ranlen <= objlen); + return ranlen < 0 ? 0 : ranlen; +} + + +/* Assert some properties of the adjusted range 'len' value. */ +#ifndef NDEBUG +static void +assert_adjusted_unicode_error_len(Py_ssize_t ranlen, Py_ssize_t objlen) +{ + assert(objlen >= 0); + assert(ranlen >= 0); + assert(ranlen <= objlen); +} +#else +#define assert_adjusted_unicode_error_len(...) +#endif + + /* * Get various common parameters of a UnicodeError object. * @@ -2884,22 +3106,24 @@ unicode_error_adjust_end(Py_ssize_t end, Py_ssize_t objlen) * objlen The 'object' length. * start The clipped 'start' attribute. * end The clipped 'end' attribute. + * slen The length of the slice described by the clipped 'start' + * and 'end' values. It always lies in [0, objlen]. * * An output parameter can be NULL to indicate that * the corresponding value does not need to be stored. * * Input parameter: * - * as_bytes If 1, the error's 'object' attribute must be a bytes object, - * i.e. the call is for a `UnicodeDecodeError`. Otherwise, the - * 'object' attribute must be a string. + * as_bytes If true, the error's 'object' attribute must be a `bytes`, + * i.e. 'self' is a `UnicodeDecodeError` instance. Otherwise, + * the 'object' attribute must be a string. * * A TypeError is raised if the 'object' type is incompatible. 
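 *
 * Illustrative aside, not part of the patch: a caller that only needs the
 * clipped slice length can, under the new signature, pass NULL for every
 * other output parameter. The exception object 'exc' and the 'as_bytes'
 * flag below are assumed to come from the caller:
 *
 *     Py_ssize_t slen;
 *     if (_PyUnicodeError_GetParams(exc, NULL, NULL, NULL, NULL, &slen,
 *                                   as_bytes) < 0) {
 *         return NULL;
 *     }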
*/ int _PyUnicodeError_GetParams(PyObject *self, PyObject **obj, Py_ssize_t *objlen, - Py_ssize_t *start, Py_ssize_t *end, + Py_ssize_t *start, Py_ssize_t *end, Py_ssize_t *slen, int as_bytes) { assert(self != NULL); @@ -2914,16 +3138,30 @@ _PyUnicodeError_GetParams(PyObject *self, if (objlen != NULL) { *objlen = n; } + + Py_ssize_t start_value = -1; + if (start != NULL || slen != NULL) { + start_value = unicode_error_adjust_start(exc->start, n); + } if (start != NULL) { - *start = unicode_error_adjust_start(exc->start, n); - assert(*start >= 0); - assert(*start <= n); + assert_adjusted_unicode_error_start(start_value, n); + *start = start_value; + } + + Py_ssize_t end_value = -1; + if (end != NULL || slen != NULL) { + end_value = unicode_error_adjust_end(exc->end, n); } if (end != NULL) { - *end = unicode_error_adjust_end(exc->end, n); - assert(*end >= 0); - assert(*end <= n); + assert_adjusted_unicode_error_end(end_value, n); + *end = end_value; } + + if (slen != NULL) { + *slen = unicode_error_adjust_len(start_value, end_value, n); + assert_adjusted_unicode_error_len(*slen, n); + } + if (obj != NULL) { *obj = r; } @@ -2991,7 +3229,9 @@ static inline int unicode_error_get_start_impl(PyObject *self, Py_ssize_t *start, int as_bytes) { assert(self != NULL); - return _PyUnicodeError_GetParams(self, NULL, NULL, start, NULL, as_bytes); + return _PyUnicodeError_GetParams(self, NULL, NULL, + start, NULL, NULL, + as_bytes); } @@ -3057,7 +3297,9 @@ static inline int unicode_error_get_end_impl(PyObject *self, Py_ssize_t *end, int as_bytes) { assert(self != NULL); - return _PyUnicodeError_GetParams(self, NULL, NULL, NULL, end, as_bytes); + return _PyUnicodeError_GetParams(self, NULL, NULL, + NULL, end, NULL, + as_bytes); } diff --git a/Objects/fileobject.c b/Objects/fileobject.c index c377d1bb28b56f..7025b5bcffc1c8 100644 --- a/Objects/fileobject.c +++ b/Objects/fileobject.c @@ -34,7 +34,7 @@ PyFile_FromFd(int fd, const char *name, const char *mode, int buffering, const c PyObject *open, *stream; /* import _io in case we are being used to open io.py */ - open = _PyImport_GetModuleAttrString("_io", "open"); + open = PyImport_ImportModuleAttrString("_io", "open"); if (open == NULL) return NULL; stream = PyObject_CallFunction(open, "isisssO", fd, mode, @@ -506,7 +506,7 @@ PyFile_OpenCodeObject(PyObject *path) if (hook) { f = hook(path, _PyRuntime.open_code_userdata); } else { - PyObject *open = _PyImport_GetModuleAttrString("_io", "open"); + PyObject *open = PyImport_ImportModuleAttrString("_io", "open"); if (open) { f = PyObject_CallFunction(open, "Os", path, "rb"); Py_DECREF(open); diff --git a/Objects/floatobject.c b/Objects/floatobject.c index bcc77287454768..7ca43033d722ab 100644 --- a/Objects/floatobject.c +++ b/Objects/floatobject.c @@ -428,9 +428,10 @@ float_richcompare(PyObject *v, PyObject *w, int op) else if (PyLong_Check(w)) { int vsign = i == 0.0 ? 0 : i < 0.0 ? -1 : 1; - int wsign = _PyLong_Sign(w); + int wsign; int exponent; + (void)PyLong_GetSign(w, &wsign); if (vsign != wsign) { /* Magnitudes are irrelevant -- the signs alone * determine the outcome. 
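/*
 * Illustrative aside, not part of the patch: float_richcompare() above now
 * calls the public PyLong_GetSign() instead of the private _PyLong_Sign().
 * Unlike the private helper, the public function validates its argument and
 * reports failure, so callers that are not already certain they hold an int
 * should check the return value. A minimal sketch (the helper name is
 * hypothetical; <Python.h> is assumed to be included):
 */
static int
compare_int_signs(PyObject *a, PyObject *b)
{
    int asign, bsign;
    if (PyLong_GetSign(a, &asign) < 0 || PyLong_GetSign(b, &bsign) < 0) {
        return -2;      /* TypeError set: one of the operands is not an int */
    }
    /* Signs alone decide the ordering: -1, 0 or +1, mirroring the
     * "magnitudes are irrelevant" fast path in the comparison above. */
    return (asign > bsign) - (asign < bsign);
}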
diff --git a/Objects/frameobject.c b/Objects/frameobject.c index 4f0040df4f3017..15ec4b78235992 100644 --- a/Objects/frameobject.c +++ b/Objects/frameobject.c @@ -1681,6 +1681,15 @@ frame_settrace(PyFrameObject *f, PyObject* v, void *closure) return 0; } +static PyObject * +frame_getgenerator(PyFrameObject *f, void *arg) { + if (f->f_frame->owner == FRAME_OWNED_BY_GENERATOR) { + PyObject *gen = (PyObject *)_PyGen_GetGeneratorFromFrame(f->f_frame); + return Py_NewRef(gen); + } + Py_RETURN_NONE; +} + static PyGetSetDef frame_getsetlist[] = { {"f_back", (getter)frame_getback, NULL, NULL}, @@ -1693,6 +1702,7 @@ static PyGetSetDef frame_getsetlist[] = { {"f_builtins", (getter)frame_getbuiltins, NULL, NULL}, {"f_code", (getter)frame_getcode, NULL, NULL}, {"f_trace_opcodes", (getter)frame_gettrace_opcodes, (setter)frame_settrace_opcodes, NULL}, + {"f_generator", (getter)frame_getgenerator, NULL, NULL}, {0} }; @@ -2142,7 +2152,7 @@ _PyFrame_IsEntryFrame(PyFrameObject *frame) assert(frame != NULL); _PyInterpreterFrame *f = frame->f_frame; assert(!_PyFrame_IsIncomplete(f)); - return f->previous && f->previous->owner == FRAME_OWNED_BY_CSTACK; + return f->previous && f->previous->owner == FRAME_OWNED_BY_INTERPRETER; } PyCodeObject * diff --git a/Objects/genobject.c b/Objects/genobject.c index e87f199c2504ba..73bbf86588c457 100644 --- a/Objects/genobject.c +++ b/Objects/genobject.c @@ -134,6 +134,19 @@ _PyGen_Finalize(PyObject *self) PyErr_SetRaisedException(exc); } +static void +gen_clear_frame(PyGenObject *gen) +{ + if (gen->gi_frame_state == FRAME_CLEARED) + return; + + gen->gi_frame_state = FRAME_CLEARED; + _PyInterpreterFrame *frame = &gen->gi_iframe; + frame->previous = NULL; + _PyFrame_ClearExceptCode(frame); + _PyErr_ClearExcState(&gen->gi_exc_state); +} + static void gen_dealloc(PyObject *self) { @@ -159,13 +172,7 @@ gen_dealloc(PyObject *self) if (PyCoro_CheckExact(gen)) { Py_CLEAR(((PyCoroObject *)gen)->cr_origin_or_finalizer); } - if (gen->gi_frame_state != FRAME_CLEARED) { - _PyInterpreterFrame *frame = &gen->gi_iframe; - gen->gi_frame_state = FRAME_CLEARED; - frame->previous = NULL; - _PyFrame_ClearExceptCode(frame); - _PyErr_ClearExcState(&gen->gi_exc_state); - } + gen_clear_frame(gen); assert(gen->gi_exc_state.exc_value == NULL); PyStackRef_CLEAR(gen->gi_iframe.f_executable); Py_CLEAR(gen->gi_name); @@ -400,7 +407,7 @@ gen_close(PyObject *self, PyObject *args) // RESUME after YIELD_VALUE and exception depth is 1 assert((oparg & RESUME_OPARG_LOCATION_MASK) != RESUME_AT_FUNC_START); gen->gi_frame_state = FRAME_COMPLETED; - _PyFrame_ClearLocals(&gen->gi_iframe); + gen_clear_frame(gen); Py_RETURN_NONE; } } @@ -633,30 +640,19 @@ gen_iternext(PyObject *self) int _PyGen_SetStopIterationValue(PyObject *value) { - PyObject *e; - - if (value == NULL || - (!PyTuple_Check(value) && !PyExceptionInstance_Check(value))) - { - /* Delay exception instantiation if we can */ - PyErr_SetObject(PyExc_StopIteration, value); - return 0; - } - /* Construct an exception instance manually with - * PyObject_CallOneArg and pass it to PyErr_SetObject. - * - * We do this to handle a situation when "value" is a tuple, in which - * case PyErr_SetObject would set the value of StopIteration to - * the first element of the tuple. - * - * (See PyErr_SetObject/_PyErr_CreateException code for details.) 
- */ - e = PyObject_CallOneArg(PyExc_StopIteration, value); - if (e == NULL) { + assert(!PyErr_Occurred()); + // Construct an exception instance manually with PyObject_CallOneArg() + // but use PyErr_SetRaisedException() instead of PyErr_SetObject() as + // PyErr_SetObject(exc_type, value) has a fast path when 'value' + // is a tuple, where the value of the StopIteration exception would be + // set to 'value[0]' instead of 'value'. + PyObject *exc = value == NULL + ? PyObject_CallNoArgs(PyExc_StopIteration) + : PyObject_CallOneArg(PyExc_StopIteration, value); + if (exc == NULL) { return -1; } - PyErr_SetObject(PyExc_StopIteration, e); - Py_DECREF(e); + PyErr_SetRaisedException(exc /* stolen */); return 0; } @@ -1157,7 +1153,6 @@ cr_getcode(PyObject *coro, void *Py_UNUSED(ignored)) return _gen_getcode(_PyGen_CAST(coro), "cr_code"); } - static PyGetSetDef coro_getsetlist[] = { {"__name__", gen_get_name, gen_set_name, PyDoc_STR("name of the coroutine")}, diff --git a/Objects/iterobject.c b/Objects/iterobject.c index 135ced9ea1f268..ebb342ff109222 100644 --- a/Objects/iterobject.c +++ b/Objects/iterobject.c @@ -384,6 +384,7 @@ anextawaitable_iternext(anextawaitableobject *obj) return result; } if (PyErr_ExceptionMatches(PyExc_StopAsyncIteration)) { + PyErr_Clear(); _PyGen_SetStopIterationValue(obj->default_value); } return NULL; @@ -407,6 +408,7 @@ anextawaitable_proxy(anextawaitableobject *obj, char *meth, PyObject *arg) { * exception we replace it with a `StopIteration(default)`, as if * it was the return value of `__anext__()` coroutine. */ + PyErr_Clear(); _PyGen_SetStopIterationValue(obj->default_value); } return NULL; diff --git a/Objects/listobject.c b/Objects/listobject.c index bbd53e7de94a31..f4a269e4d7b284 100644 --- a/Objects/listobject.c +++ b/Objects/listobject.c @@ -3190,7 +3190,7 @@ _PyList_AsTupleAndClear(PyListObject *self) } PyObject * -_PyList_FromStackRefSteal(const _PyStackRef *src, Py_ssize_t n) +_PyList_FromStackRefStealOnSuccess(const _PyStackRef *src, Py_ssize_t n) { if (n == 0) { return PyList_New(0); @@ -3198,9 +3198,6 @@ _PyList_FromStackRefSteal(const _PyStackRef *src, Py_ssize_t n) PyListObject *list = (PyListObject *)PyList_New(n); if (list == NULL) { - for (Py_ssize_t i = 0; i < n; i++) { - PyStackRef_CLOSE(src[i]); - } return NULL; } @@ -3906,15 +3903,17 @@ PyTypeObject PyListIter_Type = { static PyObject * list_iter(PyObject *seq) { - _PyListIterObject *it; - if (!PyList_Check(seq)) { PyErr_BadInternalCall(); return NULL; } - it = PyObject_GC_New(_PyListIterObject, &PyListIter_Type); - if (it == NULL) - return NULL; + _PyListIterObject *it = _Py_FREELIST_POP(_PyListIterObject, list_iters); + if (it == NULL) { + it = PyObject_GC_New(_PyListIterObject, &PyListIter_Type); + if (it == NULL) { + return NULL; + } + } it->it_index = 0; it->it_seq = (PyListObject *)Py_NewRef(seq); _PyObject_GC_TRACK(it); @@ -3927,7 +3926,8 @@ listiter_dealloc(PyObject *self) _PyListIterObject *it = (_PyListIterObject *)self; _PyObject_GC_UNTRACK(it); Py_XDECREF(it->it_seq); - PyObject_GC_Del(it); + assert(Py_IS_TYPE(self, &PyListIter_Type)); + _Py_FREELIST_FREE(list_iters, it, PyObject_GC_Del); } static int diff --git a/Objects/longobject.c b/Objects/longobject.c index d449a01cedf886..370328dcfe8c9a 100644 --- a/Objects/longobject.c +++ b/Objects/longobject.c @@ -152,8 +152,8 @@ long_normalize(PyLongObject *v) # define MAX_LONG_DIGITS ((INT64_MAX-1) / PyLong_SHIFT) #endif -PyLongObject * -_PyLong_New(Py_ssize_t size) +static PyLongObject * +long_alloc(Py_ssize_t size) { assert(size >= 
0); PyLongObject *result = NULL; @@ -190,6 +190,12 @@ _PyLong_New(Py_ssize_t size) return result; } +PyLongObject * +_PyLong_New(Py_ssize_t size) +{ + return long_alloc(size); +} + PyLongObject * _PyLong_FromDigits(int negative, Py_ssize_t digit_count, digit *digits) { @@ -197,9 +203,8 @@ _PyLong_FromDigits(int negative, Py_ssize_t digit_count, digit *digits) if (digit_count == 0) { return (PyLongObject *)_PyLong_GetZero(); } - PyLongObject *result = _PyLong_New(digit_count); + PyLongObject *result = long_alloc(digit_count); if (result == NULL) { - PyErr_NoMemory(); return NULL; } _PyLong_SetSignAndDigitCount(result, negative?-1:1, digit_count); @@ -211,15 +216,29 @@ PyObject * _PyLong_Copy(PyLongObject *src) { assert(src != NULL); + int sign; if (_PyLong_IsCompact(src)) { stwodigits ival = medium_value(src); if (IS_SMALL_INT(ival)) { return get_small_int((sdigit)ival); } + sign = _PyLong_CompactSign(src); } + else { + sign = _PyLong_NonCompactSign(src); + } + Py_ssize_t size = _PyLong_DigitCount(src); - return (PyObject *)_PyLong_FromDigits(_PyLong_IsNegative(src), size, src->long_value.ob_digit); + PyLongObject *result = long_alloc(size); + + if (result == NULL) { + return NULL; + } + _PyLong_SetSignAndDigitCount(result, sign, size); + memcpy(result->long_value.ob_digit, src->long_value.ob_digit, + size * sizeof(digit)); + return (PyObject *)result; } static PyObject * @@ -268,7 +287,7 @@ _PyLong_FromLarge(stwodigits ival) ++ndigits; t >>= PyLong_SHIFT; } - PyLongObject *v = _PyLong_New(ndigits); + PyLongObject *v = long_alloc(ndigits); if (v != NULL) { digit *p = v->long_value.ob_digit; _PyLong_SetSignAndDigitCount(v, sign, ndigits); @@ -341,7 +360,7 @@ PyLong_FromLong(long ival) } /* Construct output value. */ - v = _PyLong_New(ndigits); + v = long_alloc(ndigits); if (v != NULL) { digit *p = v->long_value.ob_digit; _PyLong_SetSignAndDigitCount(v, ival < 0 ? -1 : 1, ndigits); @@ -359,14 +378,18 @@ PyLong_FromLong(long ival) if (IS_SMALL_UINT(ival)) { \ return get_small_int((sdigit)(ival)); \ } \ + if ((ival) <= PyLong_MASK) { \ + return _PyLong_FromMedium((sdigit)(ival)); \ + } \ + /* Do shift in two steps to avoid possible undefined behavior. */ \ + INT_TYPE t = (ival) >> PyLong_SHIFT >> PyLong_SHIFT; \ /* Count the number of Python digits. 
*/ \ - Py_ssize_t ndigits = 0; \ - INT_TYPE t = (ival); \ + Py_ssize_t ndigits = 2; \ while (t) { \ ++ndigits; \ t >>= PyLong_SHIFT; \ } \ - PyLongObject *v = _PyLong_New(ndigits); \ + PyLongObject *v = long_alloc(ndigits); \ if (v == NULL) { \ return NULL; \ } \ @@ -443,7 +466,7 @@ PyLong_FromDouble(double dval) frac = frexp(dval, &expo); /* dval = frac*2**expo; 0.0 <= frac < 1.0 */ assert(expo > 0); ndig = (expo-1) / PyLong_SHIFT + 1; /* Number of 'digits' in result */ - v = _PyLong_New(ndig); + v = long_alloc(ndig); if (v == NULL) return NULL; frac = ldexp(frac, (expo-1) % PyLong_SHIFT + 1); @@ -827,19 +850,25 @@ PyLong_IsZero(PyObject *obj) return _PyLong_IsZero((PyLongObject *)obj); } -int -_PyLong_Sign(PyObject *vv) +static int +long_sign(PyObject *vv) { + assert(vv != NULL); + assert(PyLong_Check(vv)); PyLongObject *v = (PyLongObject *)vv; - assert(v != NULL); - assert(PyLong_Check(v)); if (_PyLong_IsCompact(v)) { return _PyLong_CompactSign(v); } return _PyLong_NonCompactSign(v); } +int +_PyLong_Sign(PyObject *vv) +{ + return long_sign(vv); +} + int PyLong_GetSign(PyObject *vv, int *sign) { @@ -848,7 +877,7 @@ PyLong_GetSign(PyObject *vv, int *sign) return -1; } - *sign = _PyLong_Sign(vv); + *sign = long_sign(vv); return 0; } @@ -946,7 +975,7 @@ _PyLong_FromByteArray(const unsigned char* bytes, size_t n, return NULL; } ndigits = (numsignificantbytes * 8 + PyLong_SHIFT - 1) / PyLong_SHIFT; - v = _PyLong_New(ndigits); + v = long_alloc(ndigits); if (v == NULL) return NULL; @@ -1476,7 +1505,7 @@ PyLong_FromLongLong(long long ival) } /* Construct output value. */ - v = _PyLong_New(ndigits); + v = long_alloc(ndigits); if (v != NULL) { digit *p = v->long_value.ob_digit; _PyLong_SetSignAndDigitCount(v, ival < 0 ? -1 : 1, ndigits); @@ -1519,7 +1548,7 @@ PyLong_FromSsize_t(Py_ssize_t ival) ++ndigits; t >>= PyLong_SHIFT; } - v = _PyLong_New(ndigits); + v = long_alloc(ndigits); if (v != NULL) { digit *p = v->long_value.ob_digit; _PyLong_SetSignAndDigitCount(v, negative ? -1 : 1, ndigits); @@ -2009,7 +2038,7 @@ divrem1(PyLongObject *a, digit n, digit *prem) PyLongObject *z; assert(n > 0 && n <= PyLong_MASK); - z = _PyLong_New(size); + z = long_alloc(size); if (z == NULL) return NULL; *prem = inplace_divrem1(z->long_value.ob_digit, a->long_value.ob_digit, size, n); @@ -2186,7 +2215,7 @@ long_to_decimal_string_internal(PyObject *aa, (10 * PyLong_SHIFT - 33 * _PyLong_DECIMAL_SHIFT); assert(size_a < PY_SSIZE_T_MAX/2); size = 1 + size_a + size_a / d; - scratch = _PyLong_New(size); + scratch = long_alloc(size); if (scratch == NULL) return -1; @@ -2629,7 +2658,7 @@ long_from_binary_base(const char *start, const char *end, Py_ssize_t digits, int return 0; } n = (digits * bits_per_char + PyLong_SHIFT - 1) / PyLong_SHIFT; - z = _PyLong_New(n); + z = long_alloc(n); if (z == NULL) { *res = NULL; return 0; @@ -2833,7 +2862,7 @@ long_from_non_binary_base(const char *start, const char *end, Py_ssize_t digits, */ double fsize_z = (double)digits * log_base_BASE[base] + 1.0; if (fsize_z > (double)MAX_LONG_DIGITS) { - /* The same exception as in _PyLong_New(). */ + /* The same exception as in long_alloc(). 
*/ PyErr_SetString(PyExc_OverflowError, "too many digits in integer"); *res = NULL; @@ -2843,7 +2872,7 @@ long_from_non_binary_base(const char *start, const char *end, Py_ssize_t digits, /* Uncomment next line to test exceedingly rare copy code */ /* size_z = 1; */ assert(size_z > 0); - z = _PyLong_New(size_z); + z = long_alloc(size_z); if (z == NULL) { *res = NULL; return 0; @@ -2906,7 +2935,7 @@ long_from_non_binary_base(const char *start, const char *end, Py_ssize_t digits, PyLongObject *tmp; /* Extremely rare. Get more space. */ assert(_PyLong_DigitCount(z) == size_z); - tmp = _PyLong_New(size_z + 1); + tmp = long_alloc(size_z + 1); if (tmp == NULL) { Py_DECREF(z); *res = NULL; @@ -3327,12 +3356,12 @@ x_divrem(PyLongObject *v1, PyLongObject *w1, PyLongObject **prem) size_v = _PyLong_DigitCount(v1); size_w = _PyLong_DigitCount(w1); assert(size_v >= size_w && size_w >= 2); /* Assert checks by div() */ - v = _PyLong_New(size_v+1); + v = long_alloc(size_v+1); if (v == NULL) { *prem = NULL; return NULL; } - w = _PyLong_New(size_w); + w = long_alloc(size_w); if (w == NULL) { Py_DECREF(v); *prem = NULL; @@ -3354,7 +3383,7 @@ x_divrem(PyLongObject *v1, PyLongObject *w1, PyLongObject **prem) at most (and usually exactly) k = size_v - size_w digits. */ k = size_v - size_w; assert(k >= 0); - a = _PyLong_New(k); + a = long_alloc(k); if (a == NULL) { Py_DECREF(w); Py_DECREF(v); @@ -3622,32 +3651,25 @@ long_richcompare(PyObject *self, PyObject *other, int op) } static inline int -compact_int_is_small(PyObject *self) +/// Return 1 if the object is one of the immortal small ints +_long_is_small_int(PyObject *op) { - PyLongObject *pylong = (PyLongObject *)self; - assert(_PyLong_IsCompact(pylong)); - stwodigits ival = medium_value(pylong); - if (IS_SMALL_INT(ival)) { - PyLongObject *small_pylong = (PyLongObject *)get_small_int((sdigit)ival); - if (pylong == small_pylong) { - return 1; - } - } - return 0; + PyLongObject *long_object = (PyLongObject *)op; + int is_small_int = (long_object->long_value.lv_tag & IMMORTALITY_BIT_MASK) != 0; + assert((!is_small_int) || PyLong_CheckExact(op)); + return is_small_int; } void _PyLong_ExactDealloc(PyObject *self) { assert(PyLong_CheckExact(self)); + if (_long_is_small_int(self)) { + // See PEP 683, section Accidental De-Immortalizing for details + _Py_SetImmortal(self); + return; + } if (_PyLong_IsCompact((PyLongObject *)self)) { - #ifndef Py_GIL_DISABLED - if (compact_int_is_small(self)) { - // See PEP 683, section Accidental De-Immortalizing for details - _Py_SetImmortal(self); - return; - } - #endif _Py_FREELIST_FREE(ints, self, PyObject_Free); return; } @@ -3657,24 +3679,20 @@ _PyLong_ExactDealloc(PyObject *self) static void long_dealloc(PyObject *self) { - assert(self); - if (_PyLong_IsCompact((PyLongObject *)self)) { - if (compact_int_is_small(self)) { - /* This should never get called, but we also don't want to SEGV if - * we accidentally decref small Ints out of existence. Instead, - * since small Ints are immortal, re-set the reference count. - * - * See PEP 683, section Accidental De-Immortalizing for details - */ - _Py_SetImmortal(self); - return; - } - if (PyLong_CheckExact(self)) { - _Py_FREELIST_FREE(ints, self, PyObject_Free); - return; - } + if (_long_is_small_int(self)) { + /* This should never get called, but we also don't want to SEGV if + * we accidentally decref small Ints out of existence. Instead, + * since small Ints are immortal, re-set the reference count. 
+ * + * See PEP 683, section Accidental De-Immortalizing for details + */ + _Py_SetImmortal(self); + return; + } + if (PyLong_CheckExact(self) && _PyLong_IsCompact((PyLongObject *)self)) { + _Py_FREELIST_FREE(ints, self, PyObject_Free); + return; } - Py_TYPE(self)->tp_free(self); } @@ -3752,7 +3770,7 @@ x_add(PyLongObject *a, PyLongObject *b) size_a = size_b; size_b = size_temp; } } - z = _PyLong_New(size_a+1); + z = long_alloc(size_a+1); if (z == NULL) return NULL; for (i = 0; i < size_b; ++i) { @@ -3801,7 +3819,7 @@ x_sub(PyLongObject *a, PyLongObject *b) } size_a = size_b = i+1; } - z = _PyLong_New(size_a); + z = long_alloc(size_a); if (z == NULL) return NULL; for (i = 0; i < size_b; ++i) { @@ -3926,7 +3944,7 @@ x_mul(PyLongObject *a, PyLongObject *b) Py_ssize_t size_b = _PyLong_DigitCount(b); Py_ssize_t i; - z = _PyLong_New(size_a + size_b); + z = long_alloc(size_a + size_b); if (z == NULL) return NULL; @@ -4036,9 +4054,9 @@ kmul_split(PyLongObject *n, size_lo = Py_MIN(size_n, size); size_hi = size_n - size_lo; - if ((hi = _PyLong_New(size_hi)) == NULL) + if ((hi = long_alloc(size_hi)) == NULL) return -1; - if ((lo = _PyLong_New(size_lo)) == NULL) { + if ((lo = long_alloc(size_lo)) == NULL) { Py_DECREF(hi); return -1; } @@ -4138,7 +4156,7 @@ k_mul(PyLongObject *a, PyLongObject *b) */ /* 1. Allocate result space. */ - ret = _PyLong_New(asize + bsize); + ret = long_alloc(asize + bsize); if (ret == NULL) goto fail; #ifdef Py_DEBUG /* Fill with trash, to catch reference to uninitialized digits. */ @@ -4288,13 +4306,13 @@ k_lopsided_mul(PyLongObject *a, PyLongObject *b) assert(2 * asize <= bsize); /* Allocate result space, and zero it out. */ - ret = _PyLong_New(asize + bsize); + ret = long_alloc(asize + bsize); if (ret == NULL) return NULL; memset(ret->long_value.ob_digit, 0, _PyLong_DigitCount(ret) * sizeof(digit)); /* Successive slices of b are copied into bslice. */ - bslice = _PyLong_New(asize); + bslice = long_alloc(asize); if (bslice == NULL) goto fail; @@ -4760,7 +4778,7 @@ long_true_divide(PyObject *v, PyObject *w) "intermediate overflow during division"); goto error; } - x = _PyLong_New(a_size + shift_digits + 1); + x = long_alloc(a_size + shift_digits + 1); if (x == NULL) goto error; for (i = 0; i < shift_digits; i++) @@ -4774,7 +4792,7 @@ long_true_divide(PyObject *v, PyObject *w) digit rem; /* x = a >> shift */ assert(a_size >= shift_digits); - x = _PyLong_New(a_size - shift_digits); + x = long_alloc(a_size - shift_digits); if (x == NULL) goto error; rem = v_rshift(x->long_value.ob_digit, a->long_value.ob_digit + shift_digits, @@ -5354,7 +5372,7 @@ long_rshift1(PyLongObject *a, Py_ssize_t wordshift, digit remshift) /* Shifting all the bits of 'a' out gives either -1 or 0. 
*/ return PyLong_FromLong(-a_negative); } - z = _PyLong_New(newsize); + z = long_alloc(newsize); if (z == NULL) { return NULL; } @@ -5469,7 +5487,7 @@ long_lshift1(PyLongObject *a, Py_ssize_t wordshift, digit remshift) newsize = oldsize + wordshift; if (remshift) ++newsize; - z = _PyLong_New(newsize); + z = long_alloc(newsize); if (z == NULL) return NULL; if (_PyLong_IsNegative(a)) { @@ -5584,7 +5602,7 @@ long_bitwise(PyLongObject *a, size_a = _PyLong_DigitCount(a); nega = _PyLong_IsNegative(a); if (nega) { - z = _PyLong_New(size_a); + z = long_alloc(size_a); if (z == NULL) return NULL; v_complement(z->long_value.ob_digit, a->long_value.ob_digit, size_a); @@ -5598,7 +5616,7 @@ long_bitwise(PyLongObject *a, size_b = _PyLong_DigitCount(b); negb = _PyLong_IsNegative(b); if (negb) { - z = _PyLong_New(size_b); + z = long_alloc(size_b); if (z == NULL) { Py_DECREF(a); return NULL; @@ -5642,7 +5660,7 @@ long_bitwise(PyLongObject *a, /* We allow an extra digit if z is negative, to make sure that the final two's complement of z doesn't overflow. */ - z = _PyLong_New(size_z + negz); + z = long_alloc(size_z + negz); if (z == NULL) { Py_DECREF(a); Py_DECREF(b); @@ -5840,7 +5858,7 @@ _PyLong_GCD(PyObject *aarg, PyObject *barg) } else { alloc_a = size_a; - c = _PyLong_New(size_a); + c = long_alloc(size_a); if (c == NULL) goto error; } @@ -5856,7 +5874,7 @@ _PyLong_GCD(PyObject *aarg, PyObject *barg) } else { alloc_b = size_a; - d = _PyLong_New(size_a); + d = long_alloc(size_a); if (d == NULL) goto error; } @@ -6036,7 +6054,7 @@ long_subtype_new(PyTypeObject *type, PyObject *x, PyObject *obase) return NULL; } assert(PyLong_Check(newobj)); - newobj->long_value.lv_tag = tmp->long_value.lv_tag; + newobj->long_value.lv_tag = tmp->long_value.lv_tag & ~IMMORTALITY_BIT_MASK; for (i = 0; i < n; i++) { newobj->long_value.ob_digit[i] = tmp->long_value.ob_digit[i]; } @@ -6904,7 +6922,7 @@ PyLongWriter_Create(int negative, Py_ssize_t ndigits, void **digits) } assert(digits != NULL); - PyLongObject *obj = _PyLong_New(ndigits); + PyLongObject *obj = long_alloc(ndigits); if (obj == NULL) { goto error; } @@ -6924,6 +6942,10 @@ PyLongWriter_Create(int negative, Py_ssize_t ndigits, void **digits) void PyLongWriter_Discard(PyLongWriter *writer) { + if (writer == NULL) { + return; + } + PyLongObject *obj = (PyLongObject *)writer; assert(Py_REFCNT(obj) == 1); Py_DECREF(obj); diff --git a/Objects/memoryobject.c b/Objects/memoryobject.c index ea4d24dc690768..331363b2babbef 100644 --- a/Objects/memoryobject.c +++ b/Objects/memoryobject.c @@ -2083,7 +2083,7 @@ struct_get_unpacker(const char *fmt, Py_ssize_t itemsize) PyObject *format = NULL; struct unpacker *x = NULL; - Struct = _PyImport_GetModuleAttrString("struct", "Struct"); + Struct = PyImport_ImportModuleAttrString("struct", "Struct"); if (Struct == NULL) return NULL; diff --git a/Objects/methodobject.c b/Objects/methodobject.c index 345da4607423cf..ecec0f7205a11d 100644 --- a/Objects/methodobject.c +++ b/Objects/methodobject.c @@ -331,7 +331,7 @@ meth_hash(PyObject *self) { PyCFunctionObject *a = _PyCFunctionObject_CAST(self); Py_hash_t x = PyObject_GenericHash(a->m_self); - Py_hash_t y = _Py_HashPointer((void*)(a->m_ml->ml_meth)); + Py_hash_t y = Py_HashPointer((void*)(a->m_ml->ml_meth)); x ^= y; if (x == -1) { x = -2; diff --git a/Objects/moduleobject.c b/Objects/moduleobject.c index a8d64c9aefae6b..740392b061ba9a 100644 --- a/Objects/moduleobject.c +++ b/Objects/moduleobject.c @@ -703,7 +703,8 @@ _PyModule_ClearDict(PyObject *d) PyErr_Clear(); } if (PyDict_SetItem(d, 
key, Py_None) != 0) { - PyErr_FormatUnraisable("Exception ignored on clearing module dict"); + PyErr_FormatUnraisable("Exception ignored while " + "clearing module dict"); } } } @@ -724,7 +725,8 @@ _PyModule_ClearDict(PyObject *d) PyErr_Clear(); } if (PyDict_SetItem(d, key, Py_None) != 0) { - PyErr_FormatUnraisable("Exception ignored on clearing module dict"); + PyErr_FormatUnraisable("Exception ignored while " + "clearing module dict"); } } } diff --git a/Objects/object.c b/Objects/object.c index 4c30257ca26938..fdff16138201a0 100644 --- a/Objects/object.c +++ b/Objects/object.c @@ -19,7 +19,7 @@ #include "pycore_object.h" // PyAPI_DATA() _Py_SwappedOp definition #include "pycore_object_state.h" // struct _reftracer_runtime_state #include "pycore_long.h" // _PyLong_GetZero() -#include "pycore_optimizer.h" // _PyUOpExecutor_Type, _PyUOpOptimizer_Type, ... +#include "pycore_optimizer.h" // _PyUOpExecutor_Type, ... #include "pycore_pyerrors.h" // _PyErr_Occurred() #include "pycore_pymem.h" // _PyMem_IsPtrFreed() #include "pycore_pystate.h" // _PyThreadState_GET() @@ -923,6 +923,8 @@ _PyObject_ClearFreeLists(struct _Py_freelists *freelists, int is_finalization) clear_freelist(&freelists->tuples[i], is_finalization, free_object); } clear_freelist(&freelists->lists, is_finalization, free_object); + clear_freelist(&freelists->list_iters, is_finalization, free_object); + clear_freelist(&freelists->tuple_iters, is_finalization, free_object); clear_freelist(&freelists->dicts, is_finalization, free_object); clear_freelist(&freelists->dictkeys, is_finalization, PyMem_Free); clear_freelist(&freelists->slices, is_finalization, free_object); @@ -937,6 +939,7 @@ _PyObject_ClearFreeLists(struct _Py_freelists *freelists, int is_finalization) } clear_freelist(&freelists->unicode_writers, is_finalization, PyMem_Free); clear_freelist(&freelists->ints, is_finalization, free_object); + clear_freelist(&freelists->pymethodobjects, is_finalization, free_object); } /* @@ -2378,11 +2381,6 @@ static PyTypeObject* static_types[] = { &_PyBufferWrapper_Type, &_PyContextTokenMissing_Type, &_PyCoroWrapper_Type, -#ifdef _Py_TIER2 - &_PyCounterExecutor_Type, - &_PyCounterOptimizer_Type, - &_PyDefaultOptimizer_Type, -#endif &_Py_GenericAliasIterType, &_PyHamtItems_Type, &_PyHamtKeys_Type, @@ -2405,7 +2403,6 @@ static PyTypeObject* static_types[] = { &_PyUnion_Type, #ifdef _Py_TIER2 &_PyUOpExecutor_Type, - &_PyUOpOptimizer_Type, #endif &_PyWeakref_CallableProxyType, &_PyWeakref_ProxyType, @@ -2589,6 +2586,20 @@ PyUnstable_Object_EnableDeferredRefcount(PyObject *op) #endif } +int +PyUnstable_TryIncRef(PyObject *op) +{ + return _Py_TryIncref(op); +} + +void +PyUnstable_EnableTryIncRef(PyObject *op) +{ +#ifdef Py_GIL_DISABLED + _PyObject_SetMaybeWeakref(op); +#endif +} + void _Py_ResurrectReference(PyObject *op) { @@ -3074,14 +3085,14 @@ _Py_SetRefcnt(PyObject *ob, Py_ssize_t refcnt) } int PyRefTracer_SetTracer(PyRefTracer tracer, void *data) { - assert(PyGILState_Check()); + _Py_AssertHoldsTstate(); _PyRuntime.ref_tracer.tracer_func = tracer; _PyRuntime.ref_tracer.tracer_data = data; return 0; } PyRefTracer PyRefTracer_GetTracer(void** data) { - assert(PyGILState_Check()); + _Py_AssertHoldsTstate(); if (data != NULL) { *data = _PyRuntime.ref_tracer.tracer_data; } @@ -3156,3 +3167,12 @@ Py_REFCNT(PyObject *ob) { return _Py_REFCNT(ob); } + +int +PyUnstable_IsImmortal(PyObject *op) +{ + /* Checking a reference count requires a thread state */ + _Py_AssertHoldsTstate(); + assert(op != NULL); + return _Py_IsImmortal(op); +} diff 
--git a/Objects/obmalloc.c b/Objects/obmalloc.c index b103deb01ca712..5688049b024696 100644 --- a/Objects/obmalloc.c +++ b/Objects/obmalloc.c @@ -2909,7 +2909,8 @@ _PyMem_DebugRawRealloc(void *ctx, void *p, size_t nbytes) static inline void _PyMem_DebugCheckGIL(const char *func) { - if (!PyGILState_Check()) { + PyThreadState *tstate = _PyThreadState_GET(); + if (tstate == NULL) { #ifndef Py_GIL_DISABLED _Py_FatalErrorFunc(func, "Python memory allocator called " diff --git a/Objects/odictobject.c b/Objects/odictobject.c index e151023dd764bf..f2d8da0c567878 100644 --- a/Objects/odictobject.c +++ b/Objects/odictobject.c @@ -260,7 +260,7 @@ mp_length __len__ - dict_length mp_subscript __getitem__ - dict_subscript mp_ass_subscript __setitem__ - dict_ass_sub __delitem__ -tp_hash __hash__ _Py_HashPointer ..._HashNotImpl +tp_hash __hash__ Py_HashPointer ..._HashNotImpl tp_str __str__ object_str - tp_getattro __getattribute__ ..._GenericGetAttr (repeated) __getattr__ diff --git a/Objects/setobject.c b/Objects/setobject.c index 955ccbebf74b54..26ab352ca6d989 100644 --- a/Objects/setobject.c +++ b/Objects/setobject.c @@ -1298,7 +1298,7 @@ set_union_impl(PySetObject *so, PyObject * const *others, PyObject *other; Py_ssize_t i; - result = (PySetObject *)set_copy(so, NULL); + result = (PySetObject *)set_copy((PyObject *)so, NULL); if (result == NULL) return NULL; @@ -1321,13 +1321,12 @@ set_or(PyObject *self, PyObject *other) if (!PyAnySet_Check(self) || !PyAnySet_Check(other)) Py_RETURN_NOTIMPLEMENTED; - PySetObject *so = _PySet_CAST(self); - result = (PySetObject *)set_copy(so, NULL); + result = (PySetObject *)set_copy(self, NULL); if (result == NULL) { return NULL; } - if (Py_Is((PyObject *)so, other)) { + if (Py_Is(self, other)) { return (PyObject *)result; } if (set_update_local(result, other)) { @@ -1449,7 +1448,7 @@ set_intersection_multi_impl(PySetObject *so, PyObject * const *others, Py_ssize_t i; if (others_length == 0) { - return set_copy(so, NULL); + return set_copy((PyObject *)so, NULL); } PyObject *result = Py_NewRef(so); @@ -1806,7 +1805,7 @@ set_difference_multi_impl(PySetObject *so, PyObject * const *others, PyObject *result, *other; if (others_length == 0) { - return set_copy(so, NULL); + return set_copy((PyObject *)so, NULL); } other = others[0]; @@ -1929,7 +1928,7 @@ set_symmetric_difference_update(PySetObject *so, PyObject *other) /*[clinic end generated code: output=fbb049c0806028de input=a50acf0365e1f0a5]*/ { if (Py_Is((PyObject *)so, other)) { - return set_clear(so, NULL); + return set_clear((PyObject *)so, NULL); } int rv; @@ -2646,7 +2645,7 @@ PySet_Clear(PyObject *set) PyErr_BadInternalCall(); return -1; } - (void)set_clear((PySetObject *)set, NULL); + (void)set_clear(set, NULL); return 0; } @@ -2742,7 +2741,7 @@ PySet_Pop(PyObject *set) PyErr_BadInternalCall(); return NULL; } - return set_pop((PySetObject *)set, NULL); + return set_pop(set, NULL); } int diff --git a/Objects/sliceobject.c b/Objects/sliceobject.c index 4fef0af93fe095..1b07c2a2c498b9 100644 --- a/Objects/sliceobject.c +++ b/Objects/sliceobject.c @@ -399,11 +399,14 @@ _PySlice_GetLongIndices(PySliceObject *self, PyObject *length, step_is_negative = 0; } else { - int step_sign; step = evaluate_slice_index(self->step); - if (step == NULL) + if (step == NULL) { goto error; - step_sign = _PyLong_Sign(step); + } + assert(PyLong_Check(step)); + + int step_sign; + (void)PyLong_GetSign(step, &step_sign); if (step_sign == 0) { PyErr_SetString(PyExc_ValueError, "slice step cannot be zero"); diff --git 
a/Objects/tupleobject.c b/Objects/tupleobject.c index 002002eb455556..60af9e40e3fe83 100644 --- a/Objects/tupleobject.c +++ b/Objects/tupleobject.c @@ -391,16 +391,13 @@ _PyTuple_FromArray(PyObject *const *src, Py_ssize_t n) } PyObject * -_PyTuple_FromStackRefSteal(const _PyStackRef *src, Py_ssize_t n) +_PyTuple_FromStackRefStealOnSuccess(const _PyStackRef *src, Py_ssize_t n) { if (n == 0) { return tuple_get_empty(); } PyTupleObject *tuple = tuple_alloc(n); if (tuple == NULL) { - for (Py_ssize_t i = 0; i < n; i++) { - PyStackRef_CLOSE(src[i]); - } return NULL; } PyObject **dst = tuple->ob_item; @@ -996,7 +993,8 @@ tupleiter_dealloc(PyObject *self) _PyTupleIterObject *it = _PyTupleIterObject_CAST(self); _PyObject_GC_UNTRACK(it); Py_XDECREF(it->it_seq); - PyObject_GC_Del(it); + assert(Py_IS_TYPE(self, &PyTupleIter_Type)); + _Py_FREELIST_FREE(tuple_iters, it, PyObject_GC_Del); } static int @@ -1122,15 +1120,16 @@ PyTypeObject PyTupleIter_Type = { static PyObject * tuple_iter(PyObject *seq) { - _PyTupleIterObject *it; - if (!PyTuple_Check(seq)) { PyErr_BadInternalCall(); return NULL; } - it = PyObject_GC_New(_PyTupleIterObject, &PyTupleIter_Type); - if (it == NULL) - return NULL; + _PyTupleIterObject *it = _Py_FREELIST_POP(_PyTupleIterObject, tuple_iters); + if (it == NULL) { + it = PyObject_GC_New(_PyTupleIterObject, &PyTupleIter_Type); + if (it == NULL) + return NULL; + } it->it_index = 0; it->it_seq = (PyTupleObject *)Py_NewRef(seq); _PyObject_GC_TRACK(it); diff --git a/Objects/typeobject.c b/Objects/typeobject.c index 7f95b519561e68..93920341a179e8 100644 --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -992,6 +992,7 @@ static void set_version_unlocked(PyTypeObject *tp, unsigned int version) { ASSERT_TYPE_LOCK_HELD(); + assert(version == 0 || (tp->tp_versions_used != _Py_ATTR_CACHE_UNUSED)); #ifndef Py_GIL_DISABLED PyInterpreterState *interp = _PyInterpreterState_GET(); // lookup the old version and set to null @@ -1038,7 +1039,7 @@ type_modified_unlocked(PyTypeObject *type) We don't assign new version tags eagerly, but only as needed. */ - if (type->tp_version_tag == 0) { + if (FT_ATOMIC_LOAD_UINT_RELAXED(type->tp_version_tag) == 0) { return; } // Cannot modify static builtin types. 
@@ -1148,6 +1149,10 @@ type_mro_modified(PyTypeObject *type, PyObject *bases) { PyObject *b = PyTuple_GET_ITEM(bases, i); PyTypeObject *cls = _PyType_CAST(b); + if (cls->tp_versions_used >= _Py_ATTR_CACHE_UNUSED) { + goto clear; + } + if (!is_subtype_with_mro(lookup_tp_mro(type), type, cls)) { goto clear; } @@ -1156,7 +1161,8 @@ type_mro_modified(PyTypeObject *type, PyObject *bases) { clear: assert(!(type->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN)); - set_version_unlocked(type, 0); /* 0 is not a valid version tag */ + set_version_unlocked(type, 0); /* 0 is not a valid version tag */ + type->tp_versions_used = _Py_ATTR_CACHE_UNUSED; if (PyType_HasFeature(type, Py_TPFLAGS_HEAPTYPE)) { // This field *must* be invalidated if the type is modified (see the // comment on struct _specialization_cache): @@ -1208,6 +1214,9 @@ _PyType_GetVersionForCurrentState(PyTypeObject *tp) #define MAX_VERSIONS_PER_CLASS 1000 +#if _Py_ATTR_CACHE_UNUSED < MAX_VERSIONS_PER_CLASS +#error "_Py_ATTR_CACHE_UNUSED must be bigger than max" +#endif static int assign_version_tag(PyInterpreterState *interp, PyTypeObject *type) @@ -1225,6 +1234,7 @@ assign_version_tag(PyInterpreterState *interp, PyTypeObject *type) return 0; } if (type->tp_versions_used >= MAX_VERSIONS_PER_CLASS) { + /* (this includes `tp_versions_used == _Py_ATTR_CACHE_UNUSED`) */ return 0; } @@ -2860,7 +2870,7 @@ vectorcall_maybe(PyThreadState *tstate, PyObject *name, */ static int -tail_contains(PyObject *tuple, int whence, PyObject *o) +tail_contains(PyObject *tuple, Py_ssize_t whence, PyObject *o) { Py_ssize_t j, size; size = PyTuple_GET_SIZE(tuple); @@ -2923,7 +2933,7 @@ check_duplicates(PyObject *tuple) */ static void -set_mro_error(PyObject **to_merge, Py_ssize_t to_merge_size, int *remain) +set_mro_error(PyObject **to_merge, Py_ssize_t to_merge_size, Py_ssize_t *remain) { Py_ssize_t i, n, off; char buf[1000]; @@ -2978,13 +2988,13 @@ pmerge(PyObject *acc, PyObject **to_merge, Py_ssize_t to_merge_size) { int res = 0; Py_ssize_t i, j, empty_cnt; - int *remain; + Py_ssize_t *remain; /* remain stores an index into each sublist of to_merge. remain[i] is the index of the next base in to_merge[i] that is not included in acc. */ - remain = PyMem_New(int, to_merge_size); + remain = PyMem_New(Py_ssize_t, to_merge_size); if (remain == NULL) { PyErr_NoMemory(); return -1; @@ -6150,7 +6160,7 @@ type_dealloc(PyObject *self) Py_XDECREF(et->ht_module); PyMem_Free(et->_ht_tpname); #ifdef Py_GIL_DISABLED - assert(et->unique_id == -1); + assert(et->unique_id == _Py_INVALID_UNIQUE_ID); #endif et->ht_token = NULL; Py_TYPE(type)->tp_free((PyObject *)type); diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index 3eafa2381c1a4d..75967d69ed374d 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -1735,7 +1735,9 @@ unicode_dealloc(PyObject *unicode) PyObject *popped; int r = PyDict_Pop(interned, unicode, &popped); if (r == -1) { - PyErr_WriteUnraisable(unicode); + PyErr_FormatUnraisable("Exception ignored while " + "removing an interned string %R", + unicode); // We don't know what happened to the string. It's probably // best to leak it: // - if it was popped, there are no more references to it @@ -6853,7 +6855,8 @@ _PyUnicode_DecodeUnicodeEscapeStateful(const char *s, unsigned char c = *first_invalid_escape; if ('4' <= c && c <= '7') { if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, - "invalid octal escape sequence '\\%.3s'", + "\"\\%.3s\" is an invalid octal escape sequence. " + "Such sequences will not work in the future. 
", first_invalid_escape) < 0) { Py_DECREF(result); @@ -6862,7 +6865,8 @@ _PyUnicode_DecodeUnicodeEscapeStateful(const char *s, } else { if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, - "invalid escape sequence '\\%c'", + "\"\\%c\" is an invalid escape sequence. " + "Such sequences will not work in the future. ", c) < 0) { Py_DECREF(result); @@ -16484,3 +16488,24 @@ PyInit__string(void) { return PyModuleDef_Init(&_string_module); } + + +#undef PyUnicode_KIND +int PyUnicode_KIND(PyObject *op) +{ + if (!PyUnicode_Check(op)) { + PyErr_Format(PyExc_TypeError, "expect str, got %T", op); + return -1; + } + return _PyASCIIObject_CAST(op)->state.kind; +} + +#undef PyUnicode_DATA +void* PyUnicode_DATA(PyObject *op) +{ + if (!PyUnicode_Check(op)) { + PyErr_Format(PyExc_TypeError, "expect str, got %T", op); + return NULL; + } + return _PyUnicode_DATA(op); +} diff --git a/Objects/weakrefobject.c b/Objects/weakrefobject.c index 0ee64ed70a63cd..bd4c4ac9b3475a 100644 --- a/Objects/weakrefobject.c +++ b/Objects/weakrefobject.c @@ -987,10 +987,13 @@ handle_callback(PyWeakReference *ref, PyObject *callback) { PyObject *cbresult = PyObject_CallOneArg(callback, (PyObject *)ref); - if (cbresult == NULL) - PyErr_WriteUnraisable(callback); - else + if (cbresult == NULL) { + PyErr_FormatUnraisable("Exception ignored while " + "calling weakref callback %R", callback); + } + else { Py_DECREF(cbresult); + } } /* This function is called by the tp_dealloc handler to clear weak references. @@ -1042,7 +1045,8 @@ PyObject_ClearWeakRefs(PyObject *object) PyObject *tuple = PyTuple_New(num_weakrefs * 2); if (tuple == NULL) { _PyWeakref_ClearWeakRefsNoCallbacks(object); - PyErr_WriteUnraisable(NULL); + PyErr_FormatUnraisable("Exception ignored while " + "clearing object weakrefs"); PyErr_SetRaisedException(exc); return; } diff --git a/PC/clinic/winreg.c.h b/PC/clinic/winreg.c.h index b14913366b77eb..00fa6a75ec113e 100644 --- a/PC/clinic/winreg.c.h +++ b/PC/clinic/winreg.c.h @@ -26,9 +26,9 @@ static PyObject * winreg_HKEYType_Close_impl(PyHKEYObject *self); static PyObject * -winreg_HKEYType_Close(PyHKEYObject *self, PyObject *Py_UNUSED(ignored)) +winreg_HKEYType_Close(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return winreg_HKEYType_Close_impl(self); + return winreg_HKEYType_Close_impl((PyHKEYObject *)self); } #endif /* (defined(MS_WINDOWS_DESKTOP) || defined(MS_WINDOWS_SYSTEM) || defined(MS_WINDOWS_GAMES)) */ @@ -56,9 +56,9 @@ static PyObject * winreg_HKEYType_Detach_impl(PyHKEYObject *self); static PyObject * -winreg_HKEYType_Detach(PyHKEYObject *self, PyObject *Py_UNUSED(ignored)) +winreg_HKEYType_Detach(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return winreg_HKEYType_Detach_impl(self); + return winreg_HKEYType_Detach_impl((PyHKEYObject *)self); } #endif /* (defined(MS_WINDOWS_DESKTOP) || defined(MS_WINDOWS_SYSTEM) || defined(MS_WINDOWS_GAMES)) */ @@ -77,12 +77,12 @@ static PyHKEYObject * winreg_HKEYType___enter___impl(PyHKEYObject *self); static PyObject * -winreg_HKEYType___enter__(PyHKEYObject *self, PyObject *Py_UNUSED(ignored)) +winreg_HKEYType___enter__(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; PyHKEYObject *_return_value; - _return_value = winreg_HKEYType___enter___impl(self); + _return_value = winreg_HKEYType___enter___impl((PyHKEYObject *)self); return_value = (PyObject *)_return_value; return return_value; @@ -105,7 +105,7 @@ winreg_HKEYType___exit___impl(PyHKEYObject *self, PyObject *exc_type, PyObject *exc_value, PyObject *traceback); static PyObject * 
-winreg_HKEYType___exit__(PyHKEYObject *self, PyObject *const *args, Py_ssize_t nargs) +winreg_HKEYType___exit__(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *exc_type; @@ -118,7 +118,7 @@ winreg_HKEYType___exit__(PyHKEYObject *self, PyObject *const *args, Py_ssize_t n exc_type = args[0]; exc_value = args[1]; traceback = args[2]; - return_value = winreg_HKEYType___exit___impl(self, exc_type, exc_value, traceback); + return_value = winreg_HKEYType___exit___impl((PyHKEYObject *)self, exc_type, exc_value, traceback); exit: return return_value; @@ -1766,4 +1766,4 @@ winreg_QueryReflectionKey(PyObject *module, PyObject *arg) #ifndef WINREG_QUERYREFLECTIONKEY_METHODDEF #define WINREG_QUERYREFLECTIONKEY_METHODDEF #endif /* !defined(WINREG_QUERYREFLECTIONKEY_METHODDEF) */ -/*[clinic end generated code: output=aef4aa8ab8ddf38f input=a9049054013a1b77]*/ +/*[clinic end generated code: output=fbe9b075cd2fa833 input=a9049054013a1b77]*/ diff --git a/PC/pyconfig.h.in b/PC/pyconfig.h.in index 010f5fe5646630..f4f57c5d270028 100644 --- a/PC/pyconfig.h.in +++ b/PC/pyconfig.h.in @@ -753,4 +753,7 @@ Py_NO_ENABLE_SHARED to find out. Also support MS_NO_COREDLL for b/w compat */ /* Define if libssl has X509_VERIFY_PARAM_set1_host and related function */ #define HAVE_X509_VERIFY_PARAM_SET1_HOST 1 +// Truncate the thread name to 32766 characters. +#define _PYTHREAD_NAME_MAXLEN 32766 + #endif /* !Py_CONFIG_H */ diff --git a/PC/python3dll.c b/PC/python3dll.c index 8657ddb9fa5155..84b3c735240b73 100755 --- a/PC/python3dll.c +++ b/PC/python3dll.c @@ -81,6 +81,8 @@ EXPORT_FUNC(Py_Main) EXPORT_FUNC(Py_MakePendingCalls) EXPORT_FUNC(Py_NewInterpreter) EXPORT_FUNC(Py_NewRef) +EXPORT_FUNC(Py_PACK_FULL_VERSION) +EXPORT_FUNC(Py_PACK_VERSION) EXPORT_FUNC(Py_REFCNT) EXPORT_FUNC(Py_ReprEnter) EXPORT_FUNC(Py_ReprLeave) diff --git a/PCbuild/_testcapi.vcxproj b/PCbuild/_testcapi.vcxproj index c41235eac356af..a68f15d25aabb7 100644 --- a/PCbuild/_testcapi.vcxproj +++ b/PCbuild/_testcapi.vcxproj @@ -127,6 +127,10 @@ + + + + diff --git a/PCbuild/_testcapi.vcxproj.filters b/PCbuild/_testcapi.vcxproj.filters index 0a00df655deefc..21091e9dc1aa16 100644 --- a/PCbuild/_testcapi.vcxproj.filters +++ b/PCbuild/_testcapi.vcxproj.filters @@ -114,6 +114,18 @@ Source Files + + Source Files + + + Source Files + + + Source Files + + + Source Files + diff --git a/PCbuild/_testlimitedcapi.vcxproj b/PCbuild/_testlimitedcapi.vcxproj index 846e027e10c7fa..36c41fc9824fda 100644 --- a/PCbuild/_testlimitedcapi.vcxproj +++ b/PCbuild/_testlimitedcapi.vcxproj @@ -103,6 +103,7 @@ + @@ -112,6 +113,8 @@ + + diff --git a/PCbuild/_testlimitedcapi.vcxproj.filters b/PCbuild/_testlimitedcapi.vcxproj.filters index 57be2e2fc5b950..62ecb2f70ffa2d 100644 --- a/PCbuild/_testlimitedcapi.vcxproj.filters +++ b/PCbuild/_testlimitedcapi.vcxproj.filters @@ -18,6 +18,7 @@ + @@ -28,6 +29,8 @@ + + diff --git a/PCbuild/pyproject.props b/PCbuild/pyproject.props index 17abfa85201a90..dbdb6b743bea37 100644 --- a/PCbuild/pyproject.props +++ b/PCbuild/pyproject.props @@ -53,7 +53,6 @@ $(PySourcePath)Include;$(PySourcePath)Include\internal;$(PySourcePath)Include\internal\mimalloc;$(GeneratedPyConfigDir);$(PySourcePath)PC;%(AdditionalIncludeDirectories) WIN32;$(_Py3NamePreprocessorDefinition);$(_PlatformPreprocessorDefinition)$(_DebugPreprocessorDefinition)$(_PyStatsPreprocessorDefinition)$(_PydPreprocessorDefinition)%(PreprocessorDefinitions) - _Py_USING_PGO=1;%(PreprocessorDefinitions) MaxSpeed true diff --git a/Parser/action_helpers.c 
b/Parser/action_helpers.c index 5ac1dd7813689c..2fe8d11badcbac 100644 --- a/Parser/action_helpers.c +++ b/Parser/action_helpers.c @@ -969,8 +969,6 @@ _PyPegen_check_fstring_conversion(Parser *p, Token* conv_token, expr_ty conv) return result_token_with_metadata(p, conv, conv_token->metadata); } -static asdl_expr_seq * -unpack_top_level_joined_strs(Parser *p, asdl_expr_seq *raw_expressions); ResultTokenWithMetadata * _PyPegen_setup_full_format_spec(Parser *p, Token *colon, asdl_expr_seq *spec, int lineno, int col_offset, int end_lineno, int end_col_offset, PyArena *arena) @@ -1279,9 +1277,9 @@ _PyPegen_decode_fstring_part(Parser* p, int is_raw, expr_ty constant, Token* tok p->arena); } -static asdl_expr_seq * -unpack_top_level_joined_strs(Parser *p, asdl_expr_seq *raw_expressions) -{ +expr_ty +_PyPegen_joined_str(Parser *p, Token* a, asdl_expr_seq* expr, Token*b) { + /* The parser might put multiple f-string values into an individual * JoinedStr node at the top level due to stuff like f-string debugging * expressions. This function flattens those and promotes them to the @@ -1289,44 +1287,14 @@ unpack_top_level_joined_strs(Parser *p, asdl_expr_seq *raw_expressions) * of the regular output, so this is not necessary if you are not going * to expose the output AST to Python level. */ - Py_ssize_t i, req_size, raw_size; - - req_size = raw_size = asdl_seq_LEN(raw_expressions); - expr_ty expr; - for (i = 0; i < raw_size; i++) { - expr = asdl_seq_GET(raw_expressions, i); - if (expr->kind == JoinedStr_kind) { - req_size += asdl_seq_LEN(expr->v.JoinedStr.values) - 1; - } - } - - asdl_expr_seq *expressions = _Py_asdl_expr_seq_new(req_size, p->arena); - if (expressions == NULL) { - return NULL; - } - - Py_ssize_t raw_index, req_index = 0; - for (raw_index = 0; raw_index < raw_size; raw_index++) { - expr = asdl_seq_GET(raw_expressions, raw_index); - if (expr->kind == JoinedStr_kind) { - asdl_expr_seq *values = expr->v.JoinedStr.values; - for (Py_ssize_t n = 0; n < asdl_seq_LEN(values); n++) { - asdl_seq_SET(expressions, req_index, asdl_seq_GET(values, n)); - req_index++; - } - } else { - asdl_seq_SET(expressions, req_index, expr); - req_index++; + Py_ssize_t n_items = asdl_seq_LEN(expr); + Py_ssize_t total_items = n_items; + for (Py_ssize_t i = 0; i < n_items; i++) { + expr_ty item = asdl_seq_GET(expr, i); + if (item->kind == JoinedStr_kind) { + total_items += asdl_seq_LEN(item->v.JoinedStr.values) - 1; } } - return expressions; -} - -expr_ty -_PyPegen_joined_str(Parser *p, Token* a, asdl_expr_seq* raw_expressions, Token*b) { - - asdl_expr_seq *expr = unpack_top_level_joined_strs(p, raw_expressions); - Py_ssize_t n_items = asdl_seq_LEN(expr); const char* quote_str = PyBytes_AsString(a->bytes); if (quote_str == NULL) { @@ -1334,7 +1302,7 @@ _PyPegen_joined_str(Parser *p, Token* a, asdl_expr_seq* raw_expressions, Token*b } int is_raw = strpbrk(quote_str, "rR") != NULL; - asdl_expr_seq *seq = _Py_asdl_expr_seq_new(n_items, p->arena); + asdl_expr_seq *seq = _Py_asdl_expr_seq_new(total_items, p->arena); if (seq == NULL) { return NULL; } @@ -1342,6 +1310,31 @@ _PyPegen_joined_str(Parser *p, Token* a, asdl_expr_seq* raw_expressions, Token*b Py_ssize_t index = 0; for (Py_ssize_t i = 0; i < n_items; i++) { expr_ty item = asdl_seq_GET(expr, i); + + // This should correspond to a JoinedStr node of two elements + // created _PyPegen_formatted_value. 
This situation can only be the result of + // a f-string debug expression where the first element is a constant with the text and the second + // a formatted value with the expression. + if (item->kind == JoinedStr_kind) { + asdl_expr_seq *values = item->v.JoinedStr.values; + if (asdl_seq_LEN(values) != 2) { + PyErr_Format(PyExc_SystemError, + "unexpected JoinedStr node without debug data in f-string at line %d", + item->lineno); + return NULL; + } + + expr_ty first = asdl_seq_GET(values, 0); + assert(first->kind == Constant_kind); + asdl_seq_SET(seq, index++, first); + + expr_ty second = asdl_seq_GET(values, 1); + assert(second->kind == FormattedValue_kind); + asdl_seq_SET(seq, index++, second); + + continue; + } + if (item->kind == Constant_kind) { item = _PyPegen_decode_fstring_part(p, is_raw, item, b); if (item == NULL) { @@ -1360,7 +1353,7 @@ _PyPegen_joined_str(Parser *p, Token* a, asdl_expr_seq* raw_expressions, Token*b } asdl_expr_seq *resized_exprs; - if (index != n_items) { + if (index != total_items) { resized_exprs = _Py_asdl_expr_seq_new(index, p->arena); if (resized_exprs == NULL) { return NULL; diff --git a/Parser/asdl_c.py b/Parser/asdl_c.py index 853a3e99807bca..7b2df738119115 100755 --- a/Parser/asdl_c.py +++ b/Parser/asdl_c.py @@ -1462,10 +1462,11 @@ def visitModule(self, mod): return PyObject_Repr(list); } - _PyUnicodeWriter writer; - _PyUnicodeWriter_Init(&writer); - writer.overallocate = 1; PyObject *items[2] = {NULL, NULL}; + PyUnicodeWriter *writer = PyUnicodeWriter_Create(0); + if (writer == NULL) { + goto error; + } items[0] = PySequence_GetItem(list, 0); if (!items[0]) { @@ -1479,52 +1480,54 @@ def visitModule(self, mod): } bool is_list = PyList_Check(list); - if (_PyUnicodeWriter_WriteChar(&writer, is_list ? '[' : '(') < 0) { + if (PyUnicodeWriter_WriteChar(writer, is_list ? '[' : '(') < 0) { goto error; } for (Py_ssize_t i = 0; i < Py_MIN(length, 2); i++) { - PyObject *item = items[i]; - PyObject *item_repr; + if (i > 0) { + if (PyUnicodeWriter_WriteUTF8(writer, ", ", 2) < 0) { + goto error; + } + } + PyObject *item = items[i]; if (PyType_IsSubtype(Py_TYPE(item), (PyTypeObject *)state->AST_type)) { + PyObject *item_repr; item_repr = ast_repr_max_depth((AST_object*)item, depth - 1); - } else { - item_repr = PyObject_Repr(item); - } - if (!item_repr) { - goto error; - } - if (i > 0) { - if (_PyUnicodeWriter_WriteASCIIString(&writer, ", ", 2) < 0) { + if (!item_repr) { + goto error; + } + if (PyUnicodeWriter_WriteStr(writer, item_repr) < 0) { + Py_DECREF(item_repr); goto error; } - } - if (_PyUnicodeWriter_WriteStr(&writer, item_repr) < 0) { Py_DECREF(item_repr); - goto error; + } else { + if (PyUnicodeWriter_WriteRepr(writer, item) < 0) { + goto error; + } } + if (i == 0 && length > 2) { - if (_PyUnicodeWriter_WriteASCIIString(&writer, ", ...", 5) < 0) { - Py_DECREF(item_repr); + if (PyUnicodeWriter_WriteUTF8(writer, ", ...", 5) < 0) { goto error; } } - Py_DECREF(item_repr); } - if (_PyUnicodeWriter_WriteChar(&writer, is_list ? ']' : ')') < 0) { + if (PyUnicodeWriter_WriteChar(writer, is_list ? 
']' : ')') < 0) { goto error; } Py_XDECREF(items[0]); Py_XDECREF(items[1]); - return _PyUnicodeWriter_Finish(&writer); + return PyUnicodeWriter_Finish(writer); error: Py_XDECREF(items[0]); Py_XDECREF(items[1]); - _PyUnicodeWriter_Dealloc(&writer); + PyUnicodeWriter_Discard(writer); return NULL; } @@ -1568,14 +1571,15 @@ def visitModule(self, mod): } const char* tp_name = Py_TYPE(self)->tp_name; - _PyUnicodeWriter writer; - _PyUnicodeWriter_Init(&writer); - writer.overallocate = 1; + PyUnicodeWriter *writer = PyUnicodeWriter_Create(0); + if (writer == NULL) { + goto error; + } - if (_PyUnicodeWriter_WriteASCIIString(&writer, tp_name, strlen(tp_name)) < 0) { + if (PyUnicodeWriter_WriteUTF8(writer, tp_name, -1) < 0) { goto error; } - if (_PyUnicodeWriter_WriteChar(&writer, '(') < 0) { + if (PyUnicodeWriter_WriteChar(writer, '(') < 0) { goto error; } @@ -1610,13 +1614,13 @@ def visitModule(self, mod): } if (i > 0) { - if (_PyUnicodeWriter_WriteASCIIString(&writer, ", ", 2) < 0) { + if (PyUnicodeWriter_WriteUTF8(writer, ", ", 2) < 0) { Py_DECREF(name); Py_DECREF(value_repr); goto error; } } - if (_PyUnicodeWriter_WriteStr(&writer, name) < 0) { + if (PyUnicodeWriter_WriteStr(writer, name) < 0) { Py_DECREF(name); Py_DECREF(value_repr); goto error; @@ -1624,11 +1628,11 @@ def visitModule(self, mod): Py_DECREF(name); - if (_PyUnicodeWriter_WriteChar(&writer, '=') < 0) { + if (PyUnicodeWriter_WriteChar(writer, '=') < 0) { Py_DECREF(value_repr); goto error; } - if (_PyUnicodeWriter_WriteStr(&writer, value_repr) < 0) { + if (PyUnicodeWriter_WriteStr(writer, value_repr) < 0) { Py_DECREF(value_repr); goto error; } @@ -1636,17 +1640,17 @@ def visitModule(self, mod): Py_DECREF(value_repr); } - if (_PyUnicodeWriter_WriteChar(&writer, ')') < 0) { + if (PyUnicodeWriter_WriteChar(writer, ')') < 0) { goto error; } Py_ReprLeave((PyObject *)self); Py_DECREF(fields); - return _PyUnicodeWriter_Finish(&writer); + return PyUnicodeWriter_Finish(writer); error: Py_ReprLeave((PyObject *)self); Py_DECREF(fields); - _PyUnicodeWriter_Dealloc(&writer); + PyUnicodeWriter_Discard(writer); return NULL; } diff --git a/Parser/lexer/lexer.c b/Parser/lexer/lexer.c index dbbb94a407c81d..8c5ae37fa90860 100644 --- a/Parser/lexer/lexer.c +++ b/Parser/lexer/lexer.c @@ -212,9 +212,7 @@ _PyLexer_update_fstring_expr(struct tok_state *tok, char cur) case '}': case '!': case ':': - if (tok_mode->last_expr_end == -1) { - tok_mode->last_expr_end = strlen(tok->start); - } + tok_mode->last_expr_end = strlen(tok->start); break; default: Py_UNREACHABLE(); diff --git a/Parser/pegen.c b/Parser/pegen.c index bb98e7b184a4dc..83b0022e47d619 100644 --- a/Parser/pegen.c +++ b/Parser/pegen.c @@ -111,7 +111,7 @@ init_normalization(Parser *p) if (p->normalize) { return 1; } - p->normalize = _PyImport_GetModuleAttrString("unicodedata", "normalize"); + p->normalize = PyImport_ImportModuleAttrString("unicodedata", "normalize"); if (!p->normalize) { return 0; diff --git a/Parser/string_parser.c b/Parser/string_parser.c index 9537c543b0eb93..9dd8f9ef28bd4f 100644 --- a/Parser/string_parser.c +++ b/Parser/string_parser.c @@ -28,9 +28,16 @@ warn_invalid_escape_sequence(Parser *p, const char *first_invalid_escape, Token int octal = ('4' <= c && c <= '7'); PyObject *msg = octal - ? PyUnicode_FromFormat("invalid octal escape sequence '\\%.3s'", - first_invalid_escape) - : PyUnicode_FromFormat("invalid escape sequence '\\%c'", c); + ? PyUnicode_FromFormat( + "\"\\%.3s\" is an invalid octal escape sequence. " + "Such sequences will not work in the future. 
" + "Did you mean \"\\\\%.3s\"? A raw string is also an option.", + first_invalid_escape, first_invalid_escape) + : PyUnicode_FromFormat( + "\"\\%c\" is an invalid escape sequence. " + "Such sequences will not work in the future. " + "Did you mean \"\\\\%c\"? A raw string is also an option.", + c, c); if (msg == NULL) { return -1; } @@ -53,11 +60,16 @@ warn_invalid_escape_sequence(Parser *p, const char *first_invalid_escape, Token error location, if p->known_err_token is not set. */ p->known_err_token = t; if (octal) { - RAISE_SYNTAX_ERROR("invalid octal escape sequence '\\%.3s'", - first_invalid_escape); + RAISE_SYNTAX_ERROR( + "\"\\%.3s\" is an invalid octal escape sequence. " + "Did you mean \"\\\\%.3s\"? A raw string is also an option.", + first_invalid_escape, first_invalid_escape); } else { - RAISE_SYNTAX_ERROR("invalid escape sequence '\\%c'", c); + RAISE_SYNTAX_ERROR( + "\"\\%c\" is an invalid escape sequence. " + "Did you mean \"\\\\%c\"? A raw string is also an option.", + c, c); } } Py_DECREF(msg); diff --git a/Parser/tokenizer/file_tokenizer.c b/Parser/tokenizer/file_tokenizer.c index 2750527da484aa..efe9fb9b56abaf 100644 --- a/Parser/tokenizer/file_tokenizer.c +++ b/Parser/tokenizer/file_tokenizer.c @@ -158,7 +158,7 @@ fp_setreadl(struct tok_state *tok, const char* enc) return 0; } - open = _PyImport_GetModuleAttrString("io", "open"); + open = PyImport_ImportModuleAttrString("io", "open"); if (open == NULL) { return 0; } diff --git a/Parser/tokenizer/helpers.c b/Parser/tokenizer/helpers.c index 9c9d05bbef0f1a..5a416adb875aa1 100644 --- a/Parser/tokenizer/helpers.c +++ b/Parser/tokenizer/helpers.c @@ -113,7 +113,10 @@ _PyTokenizer_warn_invalid_escape_sequence(struct tok_state *tok, int first_inval } PyObject *msg = PyUnicode_FromFormat( - "invalid escape sequence '\\%c'", + "\"\\%c\" is an invalid escape sequence. " + "Such sequences will not work in the future. " + "Did you mean \"\\\\%c\"? A raw string is also an option.", + (char) first_invalid_escape_char, (char) first_invalid_escape_char ); @@ -129,7 +132,12 @@ _PyTokenizer_warn_invalid_escape_sequence(struct tok_state *tok, int first_inval /* Replace the SyntaxWarning exception with a SyntaxError to get a more accurate error report */ PyErr_Clear(); - return _PyTokenizer_syntaxerror(tok, "invalid escape sequence '\\%c'", (char) first_invalid_escape_char); + + return _PyTokenizer_syntaxerror(tok, + "\"\\%c\" is an invalid escape sequence. " + "Did you mean \"\\\\%c\"? 
A raw string is also an option.", + (char) first_invalid_escape_char, + (char) first_invalid_escape_char); } return -1; diff --git a/Programs/_testembed.c b/Programs/_testembed.c index d15dd519dbf6af..6f6d0cae58010e 100644 --- a/Programs/_testembed.c +++ b/Programs/_testembed.c @@ -1791,55 +1791,6 @@ static int test_init_warnoptions(void) } -static int tune_config(void) -{ - PyConfig config; - PyConfig_InitPythonConfig(&config); - if (_PyInterpreterState_GetConfigCopy(&config) < 0) { - PyConfig_Clear(&config); - PyErr_Print(); - return -1; - } - - config.bytes_warning = 2; - - if (_PyInterpreterState_SetConfig(&config) < 0) { - PyConfig_Clear(&config); - return -1; - } - PyConfig_Clear(&config); - return 0; -} - - -static int test_init_set_config(void) -{ - // Initialize core - PyConfig config; - PyConfig_InitIsolatedConfig(&config); - config_set_string(&config, &config.program_name, PROGRAM_NAME); - config._init_main = 0; - config.bytes_warning = 0; - init_from_config_clear(&config); - - // Tune the configuration using _PyInterpreterState_SetConfig() - if (tune_config() < 0) { - PyErr_Print(); - return 1; - } - - // Finish initialization: main part - PyStatus status = _Py_InitializeMain(); - if (PyStatus_Exception(status)) { - Py_ExitStatusException(status); - } - - dump_config(); - Py_Finalize(); - return 0; -} - - static int initconfig_getint(PyInitConfig *config, const char *name) { int64_t value; @@ -2089,33 +2040,6 @@ static int test_init_run_main(void) } -static int test_init_main(void) -{ - PyConfig config; - PyConfig_InitPythonConfig(&config); - - configure_init_main(&config); - config._init_main = 0; - init_from_config_clear(&config); - - /* sys.stdout don't exist yet: it is created by _Py_InitializeMain() */ - int res = PyRun_SimpleString( - "import sys; " - "print('Run Python code before _Py_InitializeMain', " - "file=sys.stderr)"); - if (res < 0) { - exit(1); - } - - PyStatus status = _Py_InitializeMain(); - if (PyStatus_Exception(status)) { - Py_ExitStatusException(status); - } - - return Py_RunMain(); -} - - static int test_run_main(void) { PyConfig config; @@ -2473,14 +2397,12 @@ static struct TestCase TestCases[] = { {"test_preinit_dont_parse_argv", test_preinit_dont_parse_argv}, {"test_init_read_set", test_init_read_set}, {"test_init_run_main", test_init_run_main}, - {"test_init_main", test_init_main}, {"test_init_sys_add", test_init_sys_add}, {"test_init_setpath", test_init_setpath}, {"test_init_setpath_config", test_init_setpath_config}, {"test_init_setpythonhome", test_init_setpythonhome}, {"test_init_is_python_build", test_init_is_python_build}, {"test_init_warnoptions", test_init_warnoptions}, - {"test_init_set_config", test_init_set_config}, {"test_initconfig_api", test_initconfig_api}, {"test_initconfig_get_api", test_initconfig_get_api}, {"test_initconfig_exit", test_initconfig_exit}, diff --git a/Programs/test_frozenmain.h b/Programs/test_frozenmain.h index a0007830e8cbc0..1442434f9eba26 100644 --- a/Programs/test_frozenmain.h +++ b/Programs/test_frozenmain.h @@ -1,37 +1,37 @@ // Auto-generated by Programs/freeze_test_frozenmain.py unsigned char M_test_frozenmain[] = { 227,0,0,0,0,0,0,0,0,0,0,0,0,9,0,0, - 0,0,0,0,0,243,170,0,0,0,149,0,90,0,80,0, - 71,0,112,0,90,0,80,0,71,1,112,1,89,2,32,0, - 80,1,50,1,0,0,0,0,0,0,30,0,89,2,32,0, - 80,2,89,0,78,6,0,0,0,0,0,0,0,0,0,0, - 0,0,0,0,0,0,0,0,50,2,0,0,0,0,0,0, - 30,0,89,1,78,8,0,0,0,0,0,0,0,0,0,0, - 0,0,0,0,0,0,0,0,32,0,50,0,0,0,0,0, - 0,0,80,3,2,0,0,0,112,5,80,4,16,0,68,21, - 0,0,28,0,112,6,89,2,32,0,80,5,89,6,12,0, - 
80,6,89,5,89,6,2,0,0,0,12,0,48,4,50,1, - 0,0,0,0,0,0,30,0,73,23,0,0,9,0,30,0, - 80,0,34,0,41,7,78,122,18,70,114,111,122,101,110,32, - 72,101,108,108,111,32,87,111,114,108,100,122,8,115,121,115, - 46,97,114,103,118,218,6,99,111,110,102,105,103,41,5,218, - 12,112,114,111,103,114,97,109,95,110,97,109,101,218,10,101, - 120,101,99,117,116,97,98,108,101,218,15,117,115,101,95,101, - 110,118,105,114,111,110,109,101,110,116,218,17,99,111,110,102, - 105,103,117,114,101,95,99,95,115,116,100,105,111,218,14,98, - 117,102,102,101,114,101,100,95,115,116,100,105,111,122,7,99, - 111,110,102,105,103,32,122,2,58,32,41,7,218,3,115,121, - 115,218,17,95,116,101,115,116,105,110,116,101,114,110,97,108, - 99,97,112,105,218,5,112,114,105,110,116,218,4,97,114,103, - 118,218,11,103,101,116,95,99,111,110,102,105,103,115,114,2, - 0,0,0,218,3,107,101,121,169,0,243,0,0,0,0,218, - 18,116,101,115,116,95,102,114,111,122,101,110,109,97,105,110, - 46,112,121,218,8,60,109,111,100,117,108,101,62,114,17,0, - 0,0,1,0,0,0,115,94,0,0,0,240,3,1,1,1, - 243,8,0,1,11,219,0,24,225,0,5,208,6,26,212,0, - 27,217,0,5,128,106,144,35,151,40,145,40,212,0,27,216, - 9,26,215,9,38,210,9,38,211,9,40,168,24,209,9,50, - 128,6,244,2,6,12,2,128,67,241,14,0,5,10,136,71, - 144,67,144,53,152,2,152,54,160,35,153,59,152,45,208,10, - 40,214,4,41,243,15,6,12,2,114,15,0,0,0, + 0,0,0,0,0,243,168,0,0,0,149,0,91,0,81,0, + 72,0,113,0,91,0,81,0,72,1,113,1,90,2,34,0, + 81,1,52,1,0,0,0,0,0,0,32,0,90,2,34,0, + 81,2,90,0,79,6,0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,52,2,0,0,0,0,0,0, + 32,0,90,1,79,8,0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,34,0,52,0,0,0,0,0, + 0,0,81,3,2,0,0,0,113,5,81,4,18,0,69,20, + 0,0,113,6,90,2,34,0,81,5,90,6,13,0,81,6, + 90,5,90,6,2,0,0,0,13,0,50,4,52,1,0,0, + 0,0,0,0,32,0,74,22,0,0,10,0,31,0,81,0, + 36,0,41,7,78,122,18,70,114,111,122,101,110,32,72,101, + 108,108,111,32,87,111,114,108,100,122,8,115,121,115,46,97, + 114,103,118,218,6,99,111,110,102,105,103,41,5,218,12,112, + 114,111,103,114,97,109,95,110,97,109,101,218,10,101,120,101, + 99,117,116,97,98,108,101,218,15,117,115,101,95,101,110,118, + 105,114,111,110,109,101,110,116,218,17,99,111,110,102,105,103, + 117,114,101,95,99,95,115,116,100,105,111,218,14,98,117,102, + 102,101,114,101,100,95,115,116,100,105,111,122,7,99,111,110, + 102,105,103,32,122,2,58,32,41,7,218,3,115,121,115,218, + 17,95,116,101,115,116,105,110,116,101,114,110,97,108,99,97, + 112,105,218,5,112,114,105,110,116,218,4,97,114,103,118,218, + 11,103,101,116,95,99,111,110,102,105,103,115,114,2,0,0, + 0,218,3,107,101,121,169,0,243,0,0,0,0,218,18,116, + 101,115,116,95,102,114,111,122,101,110,109,97,105,110,46,112, + 121,218,8,60,109,111,100,117,108,101,62,114,17,0,0,0, + 1,0,0,0,115,94,0,0,0,240,3,1,1,1,243,8, + 0,1,11,219,0,24,225,0,5,208,6,26,212,0,27,217, + 0,5,128,106,144,35,151,40,145,40,212,0,27,216,9,26, + 215,9,38,210,9,38,211,9,40,168,24,209,9,50,128,6, + 243,2,6,12,2,128,67,241,14,0,5,10,136,71,144,67, + 144,53,152,2,152,54,160,35,153,59,152,45,208,10,40,214, + 4,41,243,15,6,12,2,114,15,0,0,0, }; diff --git a/Python/Python-ast.c b/Python/Python-ast.c index 41299b29705848..7038e3c92ab8f0 100644 --- a/Python/Python-ast.c +++ b/Python/Python-ast.c @@ -5661,10 +5661,11 @@ ast_repr_list(PyObject *list, int depth) return PyObject_Repr(list); } - _PyUnicodeWriter writer; - _PyUnicodeWriter_Init(&writer); - writer.overallocate = 1; PyObject *items[2] = {NULL, NULL}; + PyUnicodeWriter *writer = PyUnicodeWriter_Create(0); + if (writer == NULL) { + goto error; + } items[0] = PySequence_GetItem(list, 0); if (!items[0]) { @@ -5678,52 
+5679,54 @@ ast_repr_list(PyObject *list, int depth) } bool is_list = PyList_Check(list); - if (_PyUnicodeWriter_WriteChar(&writer, is_list ? '[' : '(') < 0) { + if (PyUnicodeWriter_WriteChar(writer, is_list ? '[' : '(') < 0) { goto error; } for (Py_ssize_t i = 0; i < Py_MIN(length, 2); i++) { - PyObject *item = items[i]; - PyObject *item_repr; + if (i > 0) { + if (PyUnicodeWriter_WriteUTF8(writer, ", ", 2) < 0) { + goto error; + } + } + PyObject *item = items[i]; if (PyType_IsSubtype(Py_TYPE(item), (PyTypeObject *)state->AST_type)) { + PyObject *item_repr; item_repr = ast_repr_max_depth((AST_object*)item, depth - 1); - } else { - item_repr = PyObject_Repr(item); - } - if (!item_repr) { - goto error; - } - if (i > 0) { - if (_PyUnicodeWriter_WriteASCIIString(&writer, ", ", 2) < 0) { + if (!item_repr) { + goto error; + } + if (PyUnicodeWriter_WriteStr(writer, item_repr) < 0) { + Py_DECREF(item_repr); goto error; } - } - if (_PyUnicodeWriter_WriteStr(&writer, item_repr) < 0) { Py_DECREF(item_repr); - goto error; + } else { + if (PyUnicodeWriter_WriteRepr(writer, item) < 0) { + goto error; + } } + if (i == 0 && length > 2) { - if (_PyUnicodeWriter_WriteASCIIString(&writer, ", ...", 5) < 0) { - Py_DECREF(item_repr); + if (PyUnicodeWriter_WriteUTF8(writer, ", ...", 5) < 0) { goto error; } } - Py_DECREF(item_repr); } - if (_PyUnicodeWriter_WriteChar(&writer, is_list ? ']' : ')') < 0) { + if (PyUnicodeWriter_WriteChar(writer, is_list ? ']' : ')') < 0) { goto error; } Py_XDECREF(items[0]); Py_XDECREF(items[1]); - return _PyUnicodeWriter_Finish(&writer); + return PyUnicodeWriter_Finish(writer); error: Py_XDECREF(items[0]); Py_XDECREF(items[1]); - _PyUnicodeWriter_Dealloc(&writer); + PyUnicodeWriter_Discard(writer); return NULL; } @@ -5767,14 +5770,15 @@ ast_repr_max_depth(AST_object *self, int depth) } const char* tp_name = Py_TYPE(self)->tp_name; - _PyUnicodeWriter writer; - _PyUnicodeWriter_Init(&writer); - writer.overallocate = 1; + PyUnicodeWriter *writer = PyUnicodeWriter_Create(0); + if (writer == NULL) { + goto error; + } - if (_PyUnicodeWriter_WriteASCIIString(&writer, tp_name, strlen(tp_name)) < 0) { + if (PyUnicodeWriter_WriteUTF8(writer, tp_name, -1) < 0) { goto error; } - if (_PyUnicodeWriter_WriteChar(&writer, '(') < 0) { + if (PyUnicodeWriter_WriteChar(writer, '(') < 0) { goto error; } @@ -5809,13 +5813,13 @@ ast_repr_max_depth(AST_object *self, int depth) } if (i > 0) { - if (_PyUnicodeWriter_WriteASCIIString(&writer, ", ", 2) < 0) { + if (PyUnicodeWriter_WriteUTF8(writer, ", ", 2) < 0) { Py_DECREF(name); Py_DECREF(value_repr); goto error; } } - if (_PyUnicodeWriter_WriteStr(&writer, name) < 0) { + if (PyUnicodeWriter_WriteStr(writer, name) < 0) { Py_DECREF(name); Py_DECREF(value_repr); goto error; @@ -5823,11 +5827,11 @@ ast_repr_max_depth(AST_object *self, int depth) Py_DECREF(name); - if (_PyUnicodeWriter_WriteChar(&writer, '=') < 0) { + if (PyUnicodeWriter_WriteChar(writer, '=') < 0) { Py_DECREF(value_repr); goto error; } - if (_PyUnicodeWriter_WriteStr(&writer, value_repr) < 0) { + if (PyUnicodeWriter_WriteStr(writer, value_repr) < 0) { Py_DECREF(value_repr); goto error; } @@ -5835,17 +5839,17 @@ ast_repr_max_depth(AST_object *self, int depth) Py_DECREF(value_repr); } - if (_PyUnicodeWriter_WriteChar(&writer, ')') < 0) { + if (PyUnicodeWriter_WriteChar(writer, ')') < 0) { goto error; } Py_ReprLeave((PyObject *)self); Py_DECREF(fields); - return _PyUnicodeWriter_Finish(&writer); + return PyUnicodeWriter_Finish(writer); error: Py_ReprLeave((PyObject *)self); Py_DECREF(fields); - 
_PyUnicodeWriter_Dealloc(&writer); + PyUnicodeWriter_Discard(writer); return NULL; } diff --git a/Python/_warnings.c b/Python/_warnings.c index e05ba99e8eaec4..283f203c72c9bf 100644 --- a/Python/_warnings.c +++ b/Python/_warnings.c @@ -232,6 +232,61 @@ get_warnings_attr(PyInterpreterState *interp, PyObject *attr, int try_import) return obj; } +static inline void +warnings_lock(PyInterpreterState *interp) +{ + WarningsState *st = warnings_get_state(interp); + assert(st != NULL); + _PyRecursiveMutex_Lock(&st->lock); +} + +static inline void +warnings_unlock(PyInterpreterState *interp) +{ + WarningsState *st = warnings_get_state(interp); + assert(st != NULL); + _PyRecursiveMutex_Unlock(&st->lock); +} + +static inline bool +warnings_lock_held(WarningsState *st) +{ + return PyMutex_IsLocked(&st->lock.mutex); +} + +/*[clinic input] +_acquire_lock as warnings_acquire_lock + +[clinic start generated code]*/ + +static PyObject * +warnings_acquire_lock_impl(PyObject *module) +/*[clinic end generated code: output=594313457d1bf8e1 input=46ec20e55acca52f]*/ +{ + PyInterpreterState *interp = get_current_interp(); + if (interp == NULL) { + return NULL; + } + warnings_lock(interp); + Py_RETURN_NONE; +} + +/*[clinic input] +_release_lock as warnings_release_lock + +[clinic start generated code]*/ + +static PyObject * +warnings_release_lock_impl(PyObject *module) +/*[clinic end generated code: output=d73d5a8789396750 input=ea01bb77870c5693]*/ +{ + PyInterpreterState *interp = get_current_interp(); + if (interp == NULL) { + return NULL; + } + warnings_unlock(interp); + Py_RETURN_NONE; +} static PyObject * get_once_registry(PyInterpreterState *interp) @@ -239,7 +294,7 @@ get_once_registry(PyInterpreterState *interp) WarningsState *st = warnings_get_state(interp); assert(st != NULL); - _Py_CRITICAL_SECTION_ASSERT_MUTEX_LOCKED(&st->mutex); + assert(warnings_lock_held(st)); PyObject *registry = GET_WARNINGS_ATTR(interp, onceregistry, 0); if (registry == NULL) { @@ -267,7 +322,7 @@ get_default_action(PyInterpreterState *interp) WarningsState *st = warnings_get_state(interp); assert(st != NULL); - _Py_CRITICAL_SECTION_ASSERT_MUTEX_LOCKED(&st->mutex); + assert(warnings_lock_held(st)); PyObject *default_action = GET_WARNINGS_ATTR(interp, defaultaction, 0); if (default_action == NULL) { @@ -299,7 +354,7 @@ get_filter(PyInterpreterState *interp, PyObject *category, WarningsState *st = warnings_get_state(interp); assert(st != NULL); - _Py_CRITICAL_SECTION_ASSERT_MUTEX_LOCKED(&st->mutex); + assert(warnings_lock_held(st)); PyObject *warnings_filters = GET_WARNINGS_ATTR(interp, filters, 0); if (warnings_filters == NULL) { @@ -399,7 +454,7 @@ already_warned(PyInterpreterState *interp, PyObject *registry, PyObject *key, WarningsState *st = warnings_get_state(interp); assert(st != NULL); - _Py_CRITICAL_SECTION_ASSERT_MUTEX_LOCKED(&st->mutex); + assert(warnings_lock_held(st)); PyObject *version_obj; if (PyDict_GetItemRef(registry, &_Py_ID(version), &version_obj) < 0) { @@ -994,15 +1049,10 @@ do_warn(PyObject *message, PyObject *category, Py_ssize_t stack_level, &filename, &lineno, &module, ®istry)) return NULL; -#ifdef Py_GIL_DISABLED - WarningsState *st = warnings_get_state(tstate->interp); - assert(st != NULL); -#endif - - Py_BEGIN_CRITICAL_SECTION_MUT(&st->mutex); + warnings_lock(tstate->interp); res = warn_explicit(tstate, category, message, filename, lineno, module, registry, NULL, source); - Py_END_CRITICAL_SECTION(); + warnings_unlock(tstate->interp); Py_DECREF(filename); Py_DECREF(registry); Py_DECREF(module); @@ 
-1151,27 +1201,22 @@ warnings_warn_explicit_impl(PyObject *module, PyObject *message, } } -#ifdef Py_GIL_DISABLED - WarningsState *st = warnings_get_state(tstate->interp); - assert(st != NULL); -#endif - - Py_BEGIN_CRITICAL_SECTION_MUT(&st->mutex); + warnings_lock(tstate->interp); returned = warn_explicit(tstate, category, message, filename, lineno, mod, registry, source_line, sourceobj); - Py_END_CRITICAL_SECTION(); + warnings_unlock(tstate->interp); Py_XDECREF(source_line); return returned; } /*[clinic input] -_filters_mutated as warnings_filters_mutated +_filters_mutated_lock_held as warnings_filters_mutated_lock_held [clinic start generated code]*/ static PyObject * -warnings_filters_mutated_impl(PyObject *module) -/*[clinic end generated code: output=8ce517abd12b88f4 input=35ecbf08ee2491b2]*/ +warnings_filters_mutated_lock_held_impl(PyObject *module) +/*[clinic end generated code: output=df5c84f044e856ec input=34208bf03d70e432]*/ { PyInterpreterState *interp = get_current_interp(); if (interp == NULL) { @@ -1181,14 +1226,17 @@ warnings_filters_mutated_impl(PyObject *module) WarningsState *st = warnings_get_state(interp); assert(st != NULL); - Py_BEGIN_CRITICAL_SECTION_MUT(&st->mutex); + // Note that the lock must be held by the caller. + if (!warnings_lock_held(st)) { + PyErr_SetString(PyExc_RuntimeError, "warnings lock is not held"); + return NULL; + } + st->filters_version++; - Py_END_CRITICAL_SECTION(); Py_RETURN_NONE; } - /* Function to issue a warning message; may raise an exception. */ static int @@ -1303,15 +1351,10 @@ PyErr_WarnExplicitObject(PyObject *category, PyObject *message, return -1; } -#ifdef Py_GIL_DISABLED - WarningsState *st = warnings_get_state(tstate->interp); - assert(st != NULL); -#endif - - Py_BEGIN_CRITICAL_SECTION_MUT(&st->mutex); + warnings_lock(tstate->interp); res = warn_explicit(tstate, category, message, filename, lineno, module, registry, NULL, NULL); - Py_END_CRITICAL_SECTION(); + warnings_unlock(tstate->interp); if (res == NULL) return -1; Py_DECREF(res); @@ -1376,15 +1419,10 @@ PyErr_WarnExplicitFormat(PyObject *category, PyObject *res; PyThreadState *tstate = get_current_tstate(); if (tstate != NULL) { -#ifdef Py_GIL_DISABLED - WarningsState *st = warnings_get_state(tstate->interp); - assert(st != NULL); -#endif - - Py_BEGIN_CRITICAL_SECTION_MUT(&st->mutex); + warnings_lock(tstate->interp); res = warn_explicit(tstate, category, message, filename, lineno, module, registry, NULL, NULL); - Py_END_CRITICAL_SECTION(); + warnings_unlock(tstate->interp); Py_DECREF(message); if (res != NULL) { Py_DECREF(res); @@ -1464,7 +1502,9 @@ _PyErr_WarnUnawaitedCoroutine(PyObject *coro) static PyMethodDef warnings_functions[] = { WARNINGS_WARN_METHODDEF WARNINGS_WARN_EXPLICIT_METHODDEF - WARNINGS_FILTERS_MUTATED_METHODDEF + WARNINGS_FILTERS_MUTATED_LOCK_HELD_METHODDEF + WARNINGS_ACQUIRE_LOCK_METHODDEF + WARNINGS_RELEASE_LOCK_METHODDEF /* XXX(brett.cannon): add showwarning? */ /* XXX(brett.cannon): Reasonable to add formatwarning? 
*/ {NULL, NULL} /* sentinel */ diff --git a/Python/ast_unparse.c b/Python/ast_unparse.c index 8017cfc7fcf268..f3c669c33eb07c 100644 --- a/Python/ast_unparse.c +++ b/Python/ast_unparse.c @@ -16,24 +16,40 @@ _Py_DECLARE_STR(dbl_close_br, "}}"); static PyObject * expr_as_unicode(expr_ty e, int level); static int -append_ast_expr(_PyUnicodeWriter *writer, expr_ty e, int level); +append_ast_expr(PyUnicodeWriter *writer, expr_ty e, int level); static int -append_joinedstr(_PyUnicodeWriter *writer, expr_ty e, bool is_format_spec); +append_joinedstr(PyUnicodeWriter *writer, expr_ty e, bool is_format_spec); static int -append_formattedvalue(_PyUnicodeWriter *writer, expr_ty e); +append_formattedvalue(PyUnicodeWriter *writer, expr_ty e); static int -append_ast_slice(_PyUnicodeWriter *writer, expr_ty e); +append_ast_slice(PyUnicodeWriter *writer, expr_ty e); static int -append_charp(_PyUnicodeWriter *writer, const char *charp) +append_char(PyUnicodeWriter *writer, Py_UCS4 ch) { - return _PyUnicodeWriter_WriteASCIIString(writer, charp, -1); + return PyUnicodeWriter_WriteChar(writer, ch); } +static int +append_charp(PyUnicodeWriter *writer, const char *charp) +{ + return PyUnicodeWriter_WriteUTF8(writer, charp, -1); +} + +#define APPEND_CHAR_FINISH(ch) do { \ + return append_char(writer, (ch)); \ + } while (0) + #define APPEND_STR_FINISH(str) do { \ return append_charp(writer, (str)); \ } while (0) +#define APPEND_CHAR(ch) do { \ + if (-1 == append_char(writer, (ch))) { \ + return -1; \ + } \ + } while (0) + #define APPEND_STR(str) do { \ if (-1 == append_charp(writer, (str))) { \ return -1; \ @@ -64,10 +80,9 @@ append_charp(_PyUnicodeWriter *writer, const char *charp) } while (0) static int -append_repr(_PyUnicodeWriter *writer, PyObject *obj) +append_repr(PyUnicodeWriter *writer, PyObject *obj) { PyObject *repr = PyObject_Repr(obj); - if (!repr) { return -1; } @@ -88,7 +103,8 @@ append_repr(_PyUnicodeWriter *writer, PyObject *obj) } repr = new_repr; } - int ret = _PyUnicodeWriter_WriteStr(writer, repr); + + int ret = PyUnicodeWriter_WriteStr(writer, repr); Py_DECREF(repr); return ret; } @@ -117,7 +133,7 @@ enum { }; static int -append_ast_boolop(_PyUnicodeWriter *writer, expr_ty e, int level) +append_ast_boolop(PyUnicodeWriter *writer, expr_ty e, int level) { Py_ssize_t i, value_count; asdl_expr_seq *values; @@ -139,7 +155,7 @@ append_ast_boolop(_PyUnicodeWriter *writer, expr_ty e, int level) } static int -append_ast_binop(_PyUnicodeWriter *writer, expr_ty e, int level) +append_ast_binop(PyUnicodeWriter *writer, expr_ty e, int level) { const char *op; int pr; @@ -174,7 +190,7 @@ append_ast_binop(_PyUnicodeWriter *writer, expr_ty e, int level) } static int -append_ast_unaryop(_PyUnicodeWriter *writer, expr_ty e, int level) +append_ast_unaryop(PyUnicodeWriter *writer, expr_ty e, int level) { const char *op; int pr; @@ -198,9 +214,9 @@ append_ast_unaryop(_PyUnicodeWriter *writer, expr_ty e, int level) } static int -append_ast_arg(_PyUnicodeWriter *writer, arg_ty arg) +append_ast_arg(PyUnicodeWriter *writer, arg_ty arg) { - if (-1 == _PyUnicodeWriter_WriteStr(writer, arg->arg)) { + if (PyUnicodeWriter_WriteStr(writer, arg->arg) < 0) { return -1; } if (arg->annotation) { @@ -211,7 +227,7 @@ append_ast_arg(_PyUnicodeWriter *writer, arg_ty arg) } static int -append_ast_args(_PyUnicodeWriter *writer, arguments_ty args) +append_ast_args(PyUnicodeWriter *writer, arguments_ty args) { bool first; Py_ssize_t i, di, arg_count, posonlyarg_count, default_count; @@ -232,7 +248,7 @@ append_ast_args(_PyUnicodeWriter 
*writer, arguments_ty args) di = i - posonlyarg_count - arg_count + default_count; if (di >= 0) { - APPEND_STR("="); + APPEND_CHAR('='); APPEND_EXPR((expr_ty)asdl_seq_GET(args->defaults, di), PR_TEST); } if (posonlyarg_count && i + 1 == posonlyarg_count) { @@ -260,7 +276,7 @@ append_ast_args(_PyUnicodeWriter *writer, arguments_ty args) if (di >= 0) { expr_ty default_ = (expr_ty)asdl_seq_GET(args->kw_defaults, di); if (default_) { - APPEND_STR("="); + APPEND_CHAR('='); APPEND_EXPR(default_, PR_TEST); } } @@ -277,7 +293,7 @@ append_ast_args(_PyUnicodeWriter *writer, arguments_ty args) } static int -append_ast_lambda(_PyUnicodeWriter *writer, expr_ty e, int level) +append_ast_lambda(PyUnicodeWriter *writer, expr_ty e, int level) { APPEND_STR_IF(level > PR_TEST, "("); Py_ssize_t n_positional = (asdl_seq_LEN(e->v.Lambda.args->args) + @@ -291,7 +307,7 @@ append_ast_lambda(_PyUnicodeWriter *writer, expr_ty e, int level) } static int -append_ast_ifexp(_PyUnicodeWriter *writer, expr_ty e, int level) +append_ast_ifexp(PyUnicodeWriter *writer, expr_ty e, int level) { APPEND_STR_IF(level > PR_TEST, "("); APPEND_EXPR(e->v.IfExp.body, PR_TEST + 1); @@ -304,12 +320,12 @@ append_ast_ifexp(_PyUnicodeWriter *writer, expr_ty e, int level) } static int -append_ast_dict(_PyUnicodeWriter *writer, expr_ty e) +append_ast_dict(PyUnicodeWriter *writer, expr_ty e) { Py_ssize_t i, value_count; expr_ty key_node; - APPEND_STR("{"); + APPEND_CHAR('{'); value_count = asdl_seq_LEN(e->v.Dict.values); for (i = 0; i < value_count; i++) { @@ -326,41 +342,41 @@ append_ast_dict(_PyUnicodeWriter *writer, expr_ty e) } } - APPEND_STR_FINISH("}"); + APPEND_CHAR_FINISH('}'); } static int -append_ast_set(_PyUnicodeWriter *writer, expr_ty e) +append_ast_set(PyUnicodeWriter *writer, expr_ty e) { Py_ssize_t i, elem_count; - APPEND_STR("{"); + APPEND_CHAR('{'); elem_count = asdl_seq_LEN(e->v.Set.elts); for (i = 0; i < elem_count; i++) { APPEND_STR_IF(i > 0, ", "); APPEND_EXPR((expr_ty)asdl_seq_GET(e->v.Set.elts, i), PR_TEST); } - APPEND_STR_FINISH("}"); + APPEND_CHAR_FINISH('}'); } static int -append_ast_list(_PyUnicodeWriter *writer, expr_ty e) +append_ast_list(PyUnicodeWriter *writer, expr_ty e) { Py_ssize_t i, elem_count; - APPEND_STR("["); + APPEND_CHAR('['); elem_count = asdl_seq_LEN(e->v.List.elts); for (i = 0; i < elem_count; i++) { APPEND_STR_IF(i > 0, ", "); APPEND_EXPR((expr_ty)asdl_seq_GET(e->v.List.elts, i), PR_TEST); } - APPEND_STR_FINISH("]"); + APPEND_CHAR_FINISH(']'); } static int -append_ast_tuple(_PyUnicodeWriter *writer, expr_ty e, int level) +append_ast_tuple(PyUnicodeWriter *writer, expr_ty e, int level) { Py_ssize_t i, elem_count; @@ -383,7 +399,7 @@ append_ast_tuple(_PyUnicodeWriter *writer, expr_ty e, int level) } static int -append_ast_comprehension(_PyUnicodeWriter *writer, comprehension_ty gen) +append_ast_comprehension(PyUnicodeWriter *writer, comprehension_ty gen) { Py_ssize_t i, if_count; @@ -401,7 +417,7 @@ append_ast_comprehension(_PyUnicodeWriter *writer, comprehension_ty gen) } static int -append_ast_comprehensions(_PyUnicodeWriter *writer, asdl_comprehension_seq *comprehensions) +append_ast_comprehensions(PyUnicodeWriter *writer, asdl_comprehension_seq *comprehensions) { Py_ssize_t i, gen_count; gen_count = asdl_seq_LEN(comprehensions); @@ -414,45 +430,45 @@ append_ast_comprehensions(_PyUnicodeWriter *writer, asdl_comprehension_seq *comp } static int -append_ast_genexp(_PyUnicodeWriter *writer, expr_ty e) +append_ast_genexp(PyUnicodeWriter *writer, expr_ty e) { - APPEND_STR("("); + APPEND_CHAR('('); 
APPEND_EXPR(e->v.GeneratorExp.elt, PR_TEST); APPEND(comprehensions, e->v.GeneratorExp.generators); - APPEND_STR_FINISH(")"); + APPEND_CHAR_FINISH(')'); } static int -append_ast_listcomp(_PyUnicodeWriter *writer, expr_ty e) +append_ast_listcomp(PyUnicodeWriter *writer, expr_ty e) { - APPEND_STR("["); + APPEND_CHAR('['); APPEND_EXPR(e->v.ListComp.elt, PR_TEST); APPEND(comprehensions, e->v.ListComp.generators); - APPEND_STR_FINISH("]"); + APPEND_CHAR_FINISH(']'); } static int -append_ast_setcomp(_PyUnicodeWriter *writer, expr_ty e) +append_ast_setcomp(PyUnicodeWriter *writer, expr_ty e) { - APPEND_STR("{"); + APPEND_CHAR('{'); APPEND_EXPR(e->v.SetComp.elt, PR_TEST); APPEND(comprehensions, e->v.SetComp.generators); - APPEND_STR_FINISH("}"); + APPEND_CHAR_FINISH('}'); } static int -append_ast_dictcomp(_PyUnicodeWriter *writer, expr_ty e) +append_ast_dictcomp(PyUnicodeWriter *writer, expr_ty e) { - APPEND_STR("{"); + APPEND_CHAR('{'); APPEND_EXPR(e->v.DictComp.key, PR_TEST); APPEND_STR(": "); APPEND_EXPR(e->v.DictComp.value, PR_TEST); APPEND(comprehensions, e->v.DictComp.generators); - APPEND_STR_FINISH("}"); + APPEND_CHAR_FINISH('}'); } static int -append_ast_compare(_PyUnicodeWriter *writer, expr_ty e, int level) +append_ast_compare(PyUnicodeWriter *writer, expr_ty e, int level) { const char *op; Py_ssize_t i, comparator_count; @@ -516,17 +532,17 @@ append_ast_compare(_PyUnicodeWriter *writer, expr_ty e, int level) } static int -append_ast_keyword(_PyUnicodeWriter *writer, keyword_ty kw) +append_ast_keyword(PyUnicodeWriter *writer, keyword_ty kw) { if (kw->arg == NULL) { APPEND_STR("**"); } else { - if (-1 == _PyUnicodeWriter_WriteStr(writer, kw->arg)) { + if (-1 == PyUnicodeWriter_WriteStr(writer, kw->arg)) { return -1; } - APPEND_STR("="); + APPEND_CHAR('='); } APPEND_EXPR(kw->value, PR_TEST); @@ -534,7 +550,7 @@ append_ast_keyword(_PyUnicodeWriter *writer, keyword_ty kw) } static int -append_ast_call(_PyUnicodeWriter *writer, expr_ty e) +append_ast_call(PyUnicodeWriter *writer, expr_ty e) { bool first; Py_ssize_t i, arg_count, kw_count; @@ -552,7 +568,7 @@ append_ast_call(_PyUnicodeWriter *writer, expr_ty e) } } - APPEND_STR("("); + APPEND_CHAR('('); first = true; for (i = 0; i < arg_count; i++) { @@ -565,7 +581,7 @@ append_ast_call(_PyUnicodeWriter *writer, expr_ty e) APPEND(keyword, (keyword_ty)asdl_seq_GET(e->v.Call.keywords, i)); } - APPEND_STR_FINISH(")"); + APPEND_CHAR_FINISH(')'); } static PyObject * @@ -585,20 +601,20 @@ escape_braces(PyObject *orig) } static int -append_fstring_unicode(_PyUnicodeWriter *writer, PyObject *unicode) +append_fstring_unicode(PyUnicodeWriter *writer, PyObject *unicode) { PyObject *escaped; int result = -1; escaped = escape_braces(unicode); if (escaped) { - result = _PyUnicodeWriter_WriteStr(writer, escaped); + result = PyUnicodeWriter_WriteStr(writer, escaped); Py_DECREF(escaped); } return result; } static int -append_fstring_element(_PyUnicodeWriter *writer, expr_ty e, bool is_format_spec) +append_fstring_element(PyUnicodeWriter *writer, expr_ty e, bool is_format_spec) { switch (e->kind) { case Constant_kind: @@ -619,28 +635,27 @@ append_fstring_element(_PyUnicodeWriter *writer, expr_ty e, bool is_format_spec) static PyObject * build_fstring_body(asdl_expr_seq *values, bool is_format_spec) { - Py_ssize_t i, value_count; - _PyUnicodeWriter body_writer; - _PyUnicodeWriter_Init(&body_writer); - body_writer.min_length = 256; - body_writer.overallocate = 1; + PyUnicodeWriter *body_writer = PyUnicodeWriter_Create(256); + if (body_writer == NULL) { + return 
NULL; + } - value_count = asdl_seq_LEN(values); - for (i = 0; i < value_count; ++i) { - if (-1 == append_fstring_element(&body_writer, + Py_ssize_t value_count = asdl_seq_LEN(values); + for (Py_ssize_t i = 0; i < value_count; ++i) { + if (-1 == append_fstring_element(body_writer, (expr_ty)asdl_seq_GET(values, i), is_format_spec )) { - _PyUnicodeWriter_Dealloc(&body_writer); + PyUnicodeWriter_Discard(body_writer); return NULL; } } - return _PyUnicodeWriter_Finish(&body_writer); + return PyUnicodeWriter_Finish(body_writer); } static int -append_joinedstr(_PyUnicodeWriter *writer, expr_ty e, bool is_format_spec) +append_joinedstr(PyUnicodeWriter *writer, expr_ty e, bool is_format_spec) { int result = -1; PyObject *body = build_fstring_body(e->v.JoinedStr.values, is_format_spec); @@ -656,14 +671,14 @@ append_joinedstr(_PyUnicodeWriter *writer, expr_ty e, bool is_format_spec) } } else { - result = _PyUnicodeWriter_WriteStr(writer, body); + result = PyUnicodeWriter_WriteStr(writer, body); } Py_DECREF(body); return result; } static int -append_formattedvalue(_PyUnicodeWriter *writer, expr_ty e) +append_formattedvalue(PyUnicodeWriter *writer, expr_ty e) { const char *conversion; const char *outer_brace = "{"; @@ -682,7 +697,7 @@ append_formattedvalue(_PyUnicodeWriter *writer, expr_ty e) Py_DECREF(temp_fv_str); return -1; } - if (-1 == _PyUnicodeWriter_WriteStr(writer, temp_fv_str)) { + if (-1 == PyUnicodeWriter_WriteStr(writer, temp_fv_str)) { Py_DECREF(temp_fv_str); return -1; } @@ -707,7 +722,7 @@ append_formattedvalue(_PyUnicodeWriter *writer, expr_ty e) APPEND_STR(conversion); } if (e->v.FormattedValue.format_spec) { - if (-1 == _PyUnicodeWriter_WriteASCIIString(writer, ":", 1) || + if (-1 == PyUnicodeWriter_WriteChar(writer, ':') || -1 == append_fstring_element(writer, e->v.FormattedValue.format_spec, true @@ -717,17 +732,17 @@ append_formattedvalue(_PyUnicodeWriter *writer, expr_ty e) } } - APPEND_STR_FINISH("}"); + APPEND_CHAR_FINISH('}'); } static int -append_ast_constant(_PyUnicodeWriter *writer, PyObject *constant) +append_ast_constant(PyUnicodeWriter *writer, PyObject *constant) { if (PyTuple_CheckExact(constant)) { Py_ssize_t i, elem_count; elem_count = PyTuple_GET_SIZE(constant); - APPEND_STR("("); + APPEND_CHAR('('); for (i = 0; i < elem_count; i++) { APPEND_STR_IF(i > 0, ", "); if (append_ast_constant(writer, PyTuple_GET_ITEM(constant, i)) < 0) { @@ -736,14 +751,13 @@ append_ast_constant(_PyUnicodeWriter *writer, PyObject *constant) } APPEND_STR_IF(elem_count == 1, ","); - APPEND_STR(")"); - return 0; + APPEND_CHAR_FINISH(')'); } return append_repr(writer, constant); } static int -append_ast_attribute(_PyUnicodeWriter *writer, expr_ty e) +append_ast_attribute(PyUnicodeWriter *writer, expr_ty e) { const char *period; expr_ty v = e->v.Attribute.value; @@ -759,48 +773,48 @@ append_ast_attribute(_PyUnicodeWriter *writer, expr_ty e) } APPEND_STR(period); - return _PyUnicodeWriter_WriteStr(writer, e->v.Attribute.attr); + return PyUnicodeWriter_WriteStr(writer, e->v.Attribute.attr); } static int -append_ast_slice(_PyUnicodeWriter *writer, expr_ty e) +append_ast_slice(PyUnicodeWriter *writer, expr_ty e) { if (e->v.Slice.lower) { APPEND_EXPR(e->v.Slice.lower, PR_TEST); } - APPEND_STR(":"); + APPEND_CHAR(':'); if (e->v.Slice.upper) { APPEND_EXPR(e->v.Slice.upper, PR_TEST); } if (e->v.Slice.step) { - APPEND_STR(":"); + APPEND_CHAR(':'); APPEND_EXPR(e->v.Slice.step, PR_TEST); } return 0; } static int -append_ast_subscript(_PyUnicodeWriter *writer, expr_ty e) 
+append_ast_subscript(PyUnicodeWriter *writer, expr_ty e) { APPEND_EXPR(e->v.Subscript.value, PR_ATOM); - APPEND_STR("["); + APPEND_CHAR('['); APPEND_EXPR(e->v.Subscript.slice, PR_TUPLE); - APPEND_STR_FINISH("]"); + APPEND_CHAR_FINISH(']'); } static int -append_ast_starred(_PyUnicodeWriter *writer, expr_ty e) +append_ast_starred(PyUnicodeWriter *writer, expr_ty e) { - APPEND_STR("*"); + APPEND_CHAR('*'); APPEND_EXPR(e->v.Starred.value, PR_EXPR); return 0; } static int -append_ast_yield(_PyUnicodeWriter *writer, expr_ty e) +append_ast_yield(PyUnicodeWriter *writer, expr_ty e) { if (!e->v.Yield.value) { APPEND_STR_FINISH("(yield)"); @@ -808,19 +822,19 @@ append_ast_yield(_PyUnicodeWriter *writer, expr_ty e) APPEND_STR("(yield "); APPEND_EXPR(e->v.Yield.value, PR_TEST); - APPEND_STR_FINISH(")"); + APPEND_CHAR_FINISH(')'); } static int -append_ast_yield_from(_PyUnicodeWriter *writer, expr_ty e) +append_ast_yield_from(PyUnicodeWriter *writer, expr_ty e) { APPEND_STR("(yield from "); APPEND_EXPR(e->v.YieldFrom.value, PR_TEST); - APPEND_STR_FINISH(")"); + APPEND_CHAR_FINISH(')'); } static int -append_ast_await(_PyUnicodeWriter *writer, expr_ty e, int level) +append_ast_await(PyUnicodeWriter *writer, expr_ty e, int level) { APPEND_STR_IF(level > PR_AWAIT, "("); APPEND_STR("await "); @@ -830,7 +844,7 @@ append_ast_await(_PyUnicodeWriter *writer, expr_ty e, int level) } static int -append_named_expr(_PyUnicodeWriter *writer, expr_ty e, int level) +append_named_expr(PyUnicodeWriter *writer, expr_ty e, int level) { APPEND_STR_IF(level > PR_TUPLE, "("); APPEND_EXPR(e->v.NamedExpr.target, PR_ATOM); @@ -841,7 +855,7 @@ append_named_expr(_PyUnicodeWriter *writer, expr_ty e, int level) } static int -append_ast_expr(_PyUnicodeWriter *writer, expr_ty e, int level) +append_ast_expr(PyUnicodeWriter *writer, expr_ty e, int level) { switch (e->kind) { case BoolOp_kind: @@ -881,7 +895,7 @@ append_ast_expr(_PyUnicodeWriter *writer, expr_ty e, int level) APPEND_STR_FINISH("..."); } if (e->v.Constant.kind != NULL - && -1 == _PyUnicodeWriter_WriteStr(writer, e->v.Constant.kind)) { + && -1 == PyUnicodeWriter_WriteStr(writer, e->v.Constant.kind)) { return -1; } return append_ast_constant(writer, e->v.Constant.value); @@ -899,7 +913,7 @@ append_ast_expr(_PyUnicodeWriter *writer, expr_ty e, int level) case Slice_kind: return append_ast_slice(writer, e); case Name_kind: - return _PyUnicodeWriter_WriteStr(writer, e->v.Name.id); + return PyUnicodeWriter_WriteStr(writer, e->v.Name.id); case List_kind: return append_ast_list(writer, e); case Tuple_kind: @@ -916,15 +930,16 @@ append_ast_expr(_PyUnicodeWriter *writer, expr_ty e, int level) static PyObject * expr_as_unicode(expr_ty e, int level) { - _PyUnicodeWriter writer; - _PyUnicodeWriter_Init(&writer); - writer.min_length = 256; - writer.overallocate = 1; - if (-1 == append_ast_expr(&writer, e, level)) { - _PyUnicodeWriter_Dealloc(&writer); + PyUnicodeWriter *writer = PyUnicodeWriter_Create(256); + if (writer == NULL) { + return NULL; + } + + if (-1 == append_ast_expr(writer, e, level)) { + PyUnicodeWriter_Discard(writer); return NULL; } - return _PyUnicodeWriter_Finish(&writer); + return PyUnicodeWriter_Finish(writer); } PyObject * diff --git a/Python/bltinmodule.c b/Python/bltinmodule.c index fb9868b3740b8c..46a6fd9a8ef017 100644 --- a/Python/bltinmodule.c +++ b/Python/bltinmodule.c @@ -494,6 +494,8 @@ typedef struct { PyObject *it; } filterobject; +#define _filterobject_CAST(op) ((filterobject *)(op)) + static PyObject * filter_new(PyTypeObject *type, PyObject *args, 
PyObject *kwds) { @@ -559,8 +561,9 @@ filter_vectorcall(PyObject *type, PyObject * const*args, } static void -filter_dealloc(filterobject *lz) +filter_dealloc(PyObject *self) { + filterobject *lz = _filterobject_CAST(self); PyObject_GC_UnTrack(lz); Py_TRASHCAN_BEGIN(lz, filter_dealloc) Py_XDECREF(lz->func); @@ -570,16 +573,18 @@ filter_dealloc(filterobject *lz) } static int -filter_traverse(filterobject *lz, visitproc visit, void *arg) +filter_traverse(PyObject *self, visitproc visit, void *arg) { + filterobject *lz = _filterobject_CAST(self); Py_VISIT(lz->it); Py_VISIT(lz->func); return 0; } static PyObject * -filter_next(filterobject *lz) +filter_next(PyObject *self) { + filterobject *lz = _filterobject_CAST(self); PyObject *item; PyObject *it = lz->it; long ok; @@ -613,15 +618,16 @@ filter_next(filterobject *lz) } static PyObject * -filter_reduce(filterobject *lz, PyObject *Py_UNUSED(ignored)) +filter_reduce(PyObject *self, PyObject *Py_UNUSED(ignored)) { + filterobject *lz = _filterobject_CAST(self); return Py_BuildValue("O(OO)", Py_TYPE(lz), lz->func, lz->it); } PyDoc_STRVAR(reduce_doc, "Return state information for pickling."); static PyMethodDef filter_methods[] = { - {"__reduce__", _PyCFunction_CAST(filter_reduce), METH_NOARGS, reduce_doc}, + {"__reduce__", filter_reduce, METH_NOARGS, reduce_doc}, {NULL, NULL} /* sentinel */ }; @@ -638,7 +644,7 @@ PyTypeObject PyFilter_Type = { sizeof(filterobject), /* tp_basicsize */ 0, /* tp_itemsize */ /* methods */ - (destructor)filter_dealloc, /* tp_dealloc */ + filter_dealloc, /* tp_dealloc */ 0, /* tp_vectorcall_offset */ 0, /* tp_getattr */ 0, /* tp_setattr */ @@ -656,12 +662,12 @@ PyTypeObject PyFilter_Type = { Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_BASETYPE, /* tp_flags */ filter_doc, /* tp_doc */ - (traverseproc)filter_traverse, /* tp_traverse */ + filter_traverse, /* tp_traverse */ 0, /* tp_clear */ 0, /* tp_richcompare */ 0, /* tp_weaklistoffset */ PyObject_SelfIter, /* tp_iter */ - (iternextfunc)filter_next, /* tp_iternext */ + filter_next, /* tp_iternext */ filter_methods, /* tp_methods */ 0, /* tp_members */ 0, /* tp_getset */ @@ -674,7 +680,7 @@ PyTypeObject PyFilter_Type = { PyType_GenericAlloc, /* tp_alloc */ filter_new, /* tp_new */ PyObject_GC_Del, /* tp_free */ - .tp_vectorcall = (vectorcallfunc)filter_vectorcall + .tp_vectorcall = filter_vectorcall }; @@ -1319,6 +1325,8 @@ typedef struct { int strict; } mapobject; +#define _mapobject_CAST(op) ((mapobject *)(op)) + static PyObject * map_new(PyTypeObject *type, PyObject *args, PyObject *kwds) { @@ -1422,8 +1430,9 @@ map_vectorcall(PyObject *type, PyObject * const*args, } static void -map_dealloc(mapobject *lz) +map_dealloc(PyObject *self) { + mapobject *lz = _mapobject_CAST(self); PyObject_GC_UnTrack(lz); Py_XDECREF(lz->iters); Py_XDECREF(lz->func); @@ -1431,16 +1440,18 @@ map_dealloc(mapobject *lz) } static int -map_traverse(mapobject *lz, visitproc visit, void *arg) +map_traverse(PyObject *self, visitproc visit, void *arg) { + mapobject *lz = _mapobject_CAST(self); Py_VISIT(lz->iters); Py_VISIT(lz->func); return 0; } static PyObject * -map_next(mapobject *lz) +map_next(PyObject *self) { + mapobject *lz = _mapobject_CAST(self); Py_ssize_t i; PyObject *small_stack[_PY_FASTCALL_SMALL_STACK]; PyObject **stack; @@ -1523,8 +1534,9 @@ map_next(mapobject *lz) } static PyObject * -map_reduce(mapobject *lz, PyObject *Py_UNUSED(ignored)) +map_reduce(PyObject *self, PyObject *Py_UNUSED(ignored)) { + mapobject *lz = _mapobject_CAST(self); Py_ssize_t numargs = 
PyTuple_GET_SIZE(lz->iters); PyObject *args = PyTuple_New(numargs+1); Py_ssize_t i; @@ -1545,19 +1557,20 @@ map_reduce(mapobject *lz, PyObject *Py_UNUSED(ignored)) PyDoc_STRVAR(setstate_doc, "Set state information for unpickling."); static PyObject * -map_setstate(mapobject *lz, PyObject *state) +map_setstate(PyObject *self, PyObject *state) { int strict = PyObject_IsTrue(state); if (strict < 0) { return NULL; } + mapobject *lz = _mapobject_CAST(self); lz->strict = strict; Py_RETURN_NONE; } static PyMethodDef map_methods[] = { - {"__reduce__", _PyCFunction_CAST(map_reduce), METH_NOARGS, reduce_doc}, - {"__setstate__", _PyCFunction_CAST(map_setstate), METH_O, setstate_doc}, + {"__reduce__", map_reduce, METH_NOARGS, reduce_doc}, + {"__setstate__", map_setstate, METH_O, setstate_doc}, {NULL, NULL} /* sentinel */ }; @@ -1578,7 +1591,7 @@ PyTypeObject PyMap_Type = { sizeof(mapobject), /* tp_basicsize */ 0, /* tp_itemsize */ /* methods */ - (destructor)map_dealloc, /* tp_dealloc */ + map_dealloc, /* tp_dealloc */ 0, /* tp_vectorcall_offset */ 0, /* tp_getattr */ 0, /* tp_setattr */ @@ -1596,12 +1609,12 @@ PyTypeObject PyMap_Type = { Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_BASETYPE, /* tp_flags */ map_doc, /* tp_doc */ - (traverseproc)map_traverse, /* tp_traverse */ + map_traverse, /* tp_traverse */ 0, /* tp_clear */ 0, /* tp_richcompare */ 0, /* tp_weaklistoffset */ PyObject_SelfIter, /* tp_iter */ - (iternextfunc)map_next, /* tp_iternext */ + map_next, /* tp_iternext */ map_methods, /* tp_methods */ 0, /* tp_members */ 0, /* tp_getset */ @@ -1614,7 +1627,7 @@ PyTypeObject PyMap_Type = { PyType_GenericAlloc, /* tp_alloc */ map_new, /* tp_new */ PyObject_GC_Del, /* tp_free */ - .tp_vectorcall = (vectorcallfunc)map_vectorcall + .tp_vectorcall = map_vectorcall }; @@ -2965,6 +2978,8 @@ typedef struct { int strict; } zipobject; +#define _zipobject_CAST(op) ((zipobject *)(op)) + static PyObject * zip_new(PyTypeObject *type, PyObject *args, PyObject *kwds) { @@ -3033,8 +3048,9 @@ zip_new(PyTypeObject *type, PyObject *args, PyObject *kwds) } static void -zip_dealloc(zipobject *lz) +zip_dealloc(PyObject *self) { + zipobject *lz = _zipobject_CAST(self); PyObject_GC_UnTrack(lz); Py_XDECREF(lz->ittuple); Py_XDECREF(lz->result); @@ -3042,16 +3058,19 @@ zip_dealloc(zipobject *lz) } static int -zip_traverse(zipobject *lz, visitproc visit, void *arg) +zip_traverse(PyObject *self, visitproc visit, void *arg) { + zipobject *lz = _zipobject_CAST(self); Py_VISIT(lz->ittuple); Py_VISIT(lz->result); return 0; } static PyObject * -zip_next(zipobject *lz) +zip_next(PyObject *self) { + zipobject *lz = _zipobject_CAST(self); + Py_ssize_t i; Py_ssize_t tuplesize = lz->tuplesize; PyObject *result = lz->result; @@ -3141,8 +3160,9 @@ zip_next(zipobject *lz) } static PyObject * -zip_reduce(zipobject *lz, PyObject *Py_UNUSED(ignored)) +zip_reduce(PyObject *self, PyObject *Py_UNUSED(ignored)) { + zipobject *lz = _zipobject_CAST(self); /* Just recreate the zip with the internal iterator tuple */ if (lz->strict) { return PyTuple_Pack(3, Py_TYPE(lz), lz->ittuple, Py_True); @@ -3151,19 +3171,20 @@ zip_reduce(zipobject *lz, PyObject *Py_UNUSED(ignored)) } static PyObject * -zip_setstate(zipobject *lz, PyObject *state) +zip_setstate(PyObject *self, PyObject *state) { int strict = PyObject_IsTrue(state); if (strict < 0) { return NULL; } + zipobject *lz = _zipobject_CAST(self); lz->strict = strict; Py_RETURN_NONE; } static PyMethodDef zip_methods[] = { - {"__reduce__", _PyCFunction_CAST(zip_reduce), METH_NOARGS, 
reduce_doc}, - {"__setstate__", _PyCFunction_CAST(zip_setstate), METH_O, setstate_doc}, + {"__reduce__", zip_reduce, METH_NOARGS, reduce_doc}, + {"__setstate__", zip_setstate, METH_O, setstate_doc}, {NULL} /* sentinel */ }; @@ -3188,7 +3209,7 @@ PyTypeObject PyZip_Type = { sizeof(zipobject), /* tp_basicsize */ 0, /* tp_itemsize */ /* methods */ - (destructor)zip_dealloc, /* tp_dealloc */ + zip_dealloc, /* tp_dealloc */ 0, /* tp_vectorcall_offset */ 0, /* tp_getattr */ 0, /* tp_setattr */ @@ -3206,12 +3227,12 @@ PyTypeObject PyZip_Type = { Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_BASETYPE, /* tp_flags */ zip_doc, /* tp_doc */ - (traverseproc)zip_traverse, /* tp_traverse */ + zip_traverse, /* tp_traverse */ 0, /* tp_clear */ 0, /* tp_richcompare */ 0, /* tp_weaklistoffset */ PyObject_SelfIter, /* tp_iter */ - (iternextfunc)zip_next, /* tp_iternext */ + zip_next, /* tp_iternext */ zip_methods, /* tp_methods */ 0, /* tp_members */ 0, /* tp_getset */ diff --git a/Python/bytecodes.c b/Python/bytecodes.c index 602cf7f47b812b..908eb0c2a698c5 100644 --- a/Python/bytecodes.c +++ b/Python/bytecodes.c @@ -53,6 +53,7 @@ #define super(name) static int SUPER_##name #define family(name, ...) static int family_##name #define pseudo(name) static int pseudo_##name +#define label(name) name: /* Annotations */ #define guard @@ -60,6 +61,8 @@ #define specializing #define split #define replicate(TIMES) +#define tier1 +#define no_save_ip // Dummy variables for stack effects. static PyObject *value, *value1, *value2, *left, *right, *res, *sum, *prod, *sub; @@ -101,7 +104,6 @@ dummy_func( PyObject *codeobj; PyObject *cond; PyObject *descr; - _PyInterpreterFrame entry_frame; PyObject *exc; PyObject *exit; PyObject *fget; @@ -283,11 +285,35 @@ dummy_func( } family(LOAD_CONST, 0) = { + LOAD_CONST_MORTAL, LOAD_CONST_IMMORTAL, }; - pure inst(LOAD_CONST, (-- value)) { - value = PyStackRef_FromPyObjectNew(GETITEM(FRAME_CO_CONSTS, oparg)); + inst(LOAD_CONST, (-- value)) { + /* We can't do this in the bytecode compiler as + * marshalling can intern strings and make them immortal. */ + PyObject *obj = GETITEM(FRAME_CO_CONSTS, oparg); + value = PyStackRef_FromPyObjectNew(obj); +#if ENABLE_SPECIALIZATION_FT +#ifdef Py_GIL_DISABLED + uint8_t expected = LOAD_CONST; + if (!_Py_atomic_compare_exchange_uint8( + &this_instr->op.code, &expected, + _Py_IsImmortal(obj) ? LOAD_CONST_IMMORTAL : LOAD_CONST_MORTAL)) { + // We might lose a race with instrumentation, which we don't care about. + assert(expected >= MIN_INSTRUMENTED_OPCODE); + } +#else + if (this_instr->op.code == LOAD_CONST) { + this_instr->op.code = _Py_IsImmortal(obj) ? LOAD_CONST_IMMORTAL : LOAD_CONST_MORTAL; + } +#endif +#endif + } + + inst(LOAD_CONST_MORTAL, (-- value)) { + PyObject *obj = GETITEM(FRAME_CO_CONSTS, oparg); + value = PyStackRef_FromPyObjectNew(obj); } inst(LOAD_CONST_IMMORTAL, (-- value)) { @@ -336,9 +362,18 @@ dummy_func( res = PyStackRef_NULL; } - macro(END_FOR) = POP_TOP; + no_save_ip inst(END_FOR, (value -- )) { + /* Don't update instr_ptr, so that POP_ITER sees + * the FOR_ITER as the previous instruction. + * This has the benign side effect that if value is + * finalized it will see the location as the FOR_ITER's. 
+ */ + PyStackRef_CLOSE(value); + } - tier1 inst(INSTRUMENTED_END_FOR, (receiver, value -- receiver)) { + macro(POP_ITER) = POP_TOP; + + no_save_ip tier1 inst(INSTRUMENTED_END_FOR, (receiver, value -- receiver)) { /* Need to create a fake StopIteration error here, * to conform to PEP 380 */ if (PyStackRef_GenCheck(receiver)) { @@ -350,11 +385,16 @@ dummy_func( DECREF_INPUTS(); } + tier1 inst(INSTRUMENTED_POP_ITER, (iter -- )) { + INSTRUMENTED_JUMP(prev_instr, this_instr+1, PY_MONITORING_EVENT_BRANCH_RIGHT); + PyStackRef_CLOSE(iter); + } + pure inst(END_SEND, (receiver, value -- val)) { (void)receiver; val = value; DEAD(value); - PyStackRef_CLOSE(receiver); + DECREF_INPUTS(); } tier1 inst(INSTRUMENTED_END_SEND, (receiver, value -- val)) { @@ -492,6 +532,7 @@ dummy_func( BINARY_OP_SUBTRACT_FLOAT, BINARY_OP_ADD_UNICODE, // BINARY_OP_INPLACE_ADD_UNICODE, // See comments at that opcode. + BINARY_OP_EXTEND, }; op(_GUARD_BOTH_INT, (left, right -- left, right)) { @@ -514,6 +555,8 @@ dummy_func( pure op(_BINARY_OP_MULTIPLY_INT, (left, right -- res)) { PyObject *left_o = PyStackRef_AsPyObjectBorrow(left); PyObject *right_o = PyStackRef_AsPyObjectBorrow(right); + assert(PyLong_CheckExact(left_o)); + assert(PyLong_CheckExact(right_o)); STAT_INC(BINARY_OP, hit); PyObject *res_o = _PyLong_Multiply((PyLongObject *)left_o, (PyLongObject *)right_o); @@ -527,6 +570,8 @@ dummy_func( pure op(_BINARY_OP_ADD_INT, (left, right -- res)) { PyObject *left_o = PyStackRef_AsPyObjectBorrow(left); PyObject *right_o = PyStackRef_AsPyObjectBorrow(right); + assert(PyLong_CheckExact(left_o)); + assert(PyLong_CheckExact(right_o)); STAT_INC(BINARY_OP, hit); PyObject *res_o = _PyLong_Add((PyLongObject *)left_o, (PyLongObject *)right_o); @@ -540,6 +585,8 @@ dummy_func( pure op(_BINARY_OP_SUBTRACT_INT, (left, right -- res)) { PyObject *left_o = PyStackRef_AsPyObjectBorrow(left); PyObject *right_o = PyStackRef_AsPyObjectBorrow(right); + assert(PyLong_CheckExact(left_o)); + assert(PyLong_CheckExact(right_o)); STAT_INC(BINARY_OP, hit); PyObject *res_o = _PyLong_Subtract((PyLongObject *)left_o, (PyLongObject *)right_o); @@ -551,11 +598,11 @@ dummy_func( } macro(BINARY_OP_MULTIPLY_INT) = - _GUARD_BOTH_INT + unused/1 + _BINARY_OP_MULTIPLY_INT; + _GUARD_BOTH_INT + unused/5 + _BINARY_OP_MULTIPLY_INT; macro(BINARY_OP_ADD_INT) = - _GUARD_BOTH_INT + unused/1 + _BINARY_OP_ADD_INT; + _GUARD_BOTH_INT + unused/5 + _BINARY_OP_ADD_INT; macro(BINARY_OP_SUBTRACT_INT) = - _GUARD_BOTH_INT + unused/1 + _BINARY_OP_SUBTRACT_INT; + _GUARD_BOTH_INT + unused/5 + _BINARY_OP_SUBTRACT_INT; op(_GUARD_BOTH_FLOAT, (left, right -- left, right)) { PyObject *left_o = PyStackRef_AsPyObjectBorrow(left); @@ -577,6 +624,8 @@ dummy_func( pure op(_BINARY_OP_MULTIPLY_FLOAT, (left, right -- res)) { PyObject *left_o = PyStackRef_AsPyObjectBorrow(left); PyObject *right_o = PyStackRef_AsPyObjectBorrow(right); + assert(PyFloat_CheckExact(left_o)); + assert(PyFloat_CheckExact(right_o)); STAT_INC(BINARY_OP, hit); double dres = @@ -591,6 +640,8 @@ dummy_func( pure op(_BINARY_OP_ADD_FLOAT, (left, right -- res)) { PyObject *left_o = PyStackRef_AsPyObjectBorrow(left); PyObject *right_o = PyStackRef_AsPyObjectBorrow(right); + assert(PyFloat_CheckExact(left_o)); + assert(PyFloat_CheckExact(right_o)); STAT_INC(BINARY_OP, hit); double dres = @@ -605,6 +656,8 @@ dummy_func( pure op(_BINARY_OP_SUBTRACT_FLOAT, (left, right -- res)) { PyObject *left_o = PyStackRef_AsPyObjectBorrow(left); PyObject *right_o = PyStackRef_AsPyObjectBorrow(right); + assert(PyFloat_CheckExact(left_o)); + 
assert(PyFloat_CheckExact(right_o)); STAT_INC(BINARY_OP, hit); double dres = @@ -617,11 +670,11 @@ dummy_func( } macro(BINARY_OP_MULTIPLY_FLOAT) = - _GUARD_BOTH_FLOAT + unused/1 + _BINARY_OP_MULTIPLY_FLOAT; + _GUARD_BOTH_FLOAT + unused/5 + _BINARY_OP_MULTIPLY_FLOAT; macro(BINARY_OP_ADD_FLOAT) = - _GUARD_BOTH_FLOAT + unused/1 + _BINARY_OP_ADD_FLOAT; + _GUARD_BOTH_FLOAT + unused/5 + _BINARY_OP_ADD_FLOAT; macro(BINARY_OP_SUBTRACT_FLOAT) = - _GUARD_BOTH_FLOAT + unused/1 + _BINARY_OP_SUBTRACT_FLOAT; + _GUARD_BOTH_FLOAT + unused/5 + _BINARY_OP_SUBTRACT_FLOAT; op(_GUARD_BOTH_UNICODE, (left, right -- left, right)) { PyObject *left_o = PyStackRef_AsPyObjectBorrow(left); @@ -634,18 +687,20 @@ dummy_func( pure op(_BINARY_OP_ADD_UNICODE, (left, right -- res)) { PyObject *left_o = PyStackRef_AsPyObjectBorrow(left); PyObject *right_o = PyStackRef_AsPyObjectBorrow(right); + assert(PyUnicode_CheckExact(left_o)); + assert(PyUnicode_CheckExact(right_o)); STAT_INC(BINARY_OP, hit); PyObject *res_o = PyUnicode_Concat(left_o, right_o); - PyStackRef_CLOSE_SPECIALIZED(left, _PyUnicode_ExactDealloc); PyStackRef_CLOSE_SPECIALIZED(right, _PyUnicode_ExactDealloc); + PyStackRef_CLOSE_SPECIALIZED(left, _PyUnicode_ExactDealloc); INPUTS_DEAD(); ERROR_IF(res_o == NULL, error); res = PyStackRef_FromPyObjectSteal(res_o); } macro(BINARY_OP_ADD_UNICODE) = - _GUARD_BOTH_UNICODE + unused/1 + _BINARY_OP_ADD_UNICODE; + _GUARD_BOTH_UNICODE + unused/5 + _BINARY_OP_ADD_UNICODE; // This is a subtle one. It's a super-instruction for // BINARY_OP_ADD_UNICODE followed by STORE_FAST @@ -656,6 +711,8 @@ dummy_func( op(_BINARY_OP_INPLACE_ADD_UNICODE, (left, right --)) { PyObject *left_o = PyStackRef_AsPyObjectBorrow(left); PyObject *right_o = PyStackRef_AsPyObjectBorrow(right); + assert(PyUnicode_CheckExact(left_o)); + assert(PyUnicode_CheckExact(right_o)); int next_oparg; #if TIER_ONE @@ -679,7 +736,7 @@ dummy_func( * that the string is safe to mutate. 
*/ assert(Py_REFCNT(left_o) >= 2); - PyStackRef_CLOSE(left); + PyStackRef_CLOSE_SPECIALIZED(left, _PyUnicode_ExactDealloc); DEAD(left); PyObject *temp = PyStackRef_AsPyObjectSteal(*target_local); PyUnicode_Append(&temp, right_o); @@ -695,8 +752,34 @@ dummy_func( #endif } + op(_GUARD_BINARY_OP_EXTEND, (descr/4, left, right -- left, right)) { + PyObject *left_o = PyStackRef_AsPyObjectBorrow(left); + PyObject *right_o = PyStackRef_AsPyObjectBorrow(right); + _PyBinaryOpSpecializationDescr *d = (_PyBinaryOpSpecializationDescr*)descr; + assert(INLINE_CACHE_ENTRIES_BINARY_OP == 5); + assert(d && d->guard); + int res = d->guard(left_o, right_o); + DEOPT_IF(!res); + } + + pure op(_BINARY_OP_EXTEND, (descr/4, left, right -- res)) { + PyObject *left_o = PyStackRef_AsPyObjectBorrow(left); + PyObject *right_o = PyStackRef_AsPyObjectBorrow(right); + assert(INLINE_CACHE_ENTRIES_BINARY_OP == 5); + _PyBinaryOpSpecializationDescr *d = (_PyBinaryOpSpecializationDescr*)descr; + + STAT_INC(BINARY_OP, hit); + + PyObject *res_o = d->action(left_o, right_o); + DECREF_INPUTS(); + res = PyStackRef_FromPyObjectSteal(res_o); + } + + macro(BINARY_OP_EXTEND) = + unused/1 + _GUARD_BINARY_OP_EXTEND + rewind/-4 + _BINARY_OP_EXTEND; + macro(BINARY_OP_INPLACE_ADD_UNICODE) = - _GUARD_BOTH_UNICODE + unused/1 + _BINARY_OP_INPLACE_ADD_UNICODE; + _GUARD_BOTH_UNICODE + unused/5 + _BINARY_OP_INPLACE_ADD_UNICODE; family(BINARY_SUBSCR, INLINE_CACHE_ENTRIES_BINARY_SUBSCR) = { BINARY_SUBSCR_DICT, @@ -776,8 +859,7 @@ dummy_func( err = PyObject_SetItem(PyStackRef_AsPyObjectBorrow(container), slice, PyStackRef_AsPyObjectBorrow(v)); Py_DECREF(slice); } - PyStackRef_CLOSE(v); - PyStackRef_CLOSE(container); + DECREF_INPUTS(); ERROR_IF(err, error); } @@ -959,10 +1041,10 @@ dummy_func( PyList_SET_ITEM(list, index, PyStackRef_AsPyObjectSteal(value)); assert(old_value != NULL); UNLOCK_OBJECT(list); // unlock before decrefs! - Py_DECREF(old_value); PyStackRef_CLOSE_SPECIALIZED(sub_st, _PyLong_ExactDealloc); DEAD(sub_st); PyStackRef_CLOSE(list_st); + Py_DECREF(old_value); } inst(STORE_SUBSCR_DICT, (unused/1, value, dict_st, sub -- )) { @@ -1018,7 +1100,7 @@ dummy_func( } tier1 inst(INTERPRETER_EXIT, (retval --)) { - assert(frame == &entry_frame); + assert(frame->owner == FRAME_OWNED_BY_INTERPRETER); assert(_PyFrame_IsIncomplete(frame)); /* Restore previous frame and return. */ tstate->current_frame = frame->previous; @@ -1033,9 +1115,7 @@ dummy_func( // retval is popped from the stack, but res // is pushed to a different frame, the callers' frame. inst(RETURN_VALUE, (retval -- res)) { - #if TIER_ONE - assert(frame != &entry_frame); - #endif + assert(frame->owner != FRAME_OWNED_BY_INTERPRETER); _PyStackRef temp = retval; DEAD(retval); SAVE_STACK(); @@ -1133,7 +1213,7 @@ dummy_func( PyObject *receiver_o = PyStackRef_AsPyObjectBorrow(receiver); PyObject *retval_o; - assert(frame != &entry_frame); + assert(frame->owner != FRAME_OWNED_BY_INTERPRETER); if ((tstate->interp->eval_frame == NULL) && (Py_TYPE(receiver_o) == &PyGen_Type || Py_TYPE(receiver_o) == &PyCoro_Type) && ((PyGenObject *)receiver_o)->gi_frame_state < FRAME_EXECUTING) @@ -1206,9 +1286,7 @@ dummy_func( // NOTE: It's important that YIELD_VALUE never raises an exception! // The compiler treats any exception raised here as a failed close() // or throw() call. 
- #if TIER_ONE - assert(frame != &entry_frame); - #endif + assert(frame->owner != FRAME_OWNED_BY_INTERPRETER); frame->instr_ptr++; PyGenObject *gen = _PyGen_GetGeneratorFromFrame(frame); assert(FRAME_SUSPENDED_YIELD_FROM == FRAME_SUSPENDED + 1); @@ -1597,10 +1675,13 @@ dummy_func( } // res[1] because we need a pointer to res to pass it to _PyEval_LoadGlobalStackRef - op(_LOAD_GLOBAL, ( -- res[1], null if (oparg & 1))) { + op(_LOAD_GLOBAL, ( -- res[1])) { PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1); _PyEval_LoadGlobalStackRef(GLOBALS(), BUILTINS(), name, res); ERROR_IF(PyStackRef_IsNull(*res), error); + } + + op(_PUSH_NULL_CONDITIONAL, ( -- null if (oparg & 1))) { null = PyStackRef_NULL; } @@ -1609,7 +1690,8 @@ dummy_func( counter/1 + globals_version/1 + builtins_version/1 + - _LOAD_GLOBAL; + _LOAD_GLOBAL + + _PUSH_NULL_CONDITIONAL; op(_GUARD_GLOBALS_VERSION, (version/1 --)) { PyDictObject *dict = (PyDictObject *)GLOBALS(); @@ -1639,7 +1721,7 @@ dummy_func( assert(DK_IS_UNICODE(builtins_keys)); } - op(_LOAD_GLOBAL_MODULE_FROM_KEYS, (index/1, globals_keys: PyDictKeysObject* -- res, null if (oparg & 1))) { + op(_LOAD_GLOBAL_MODULE_FROM_KEYS, (index/1, globals_keys: PyDictKeysObject* -- res)) { PyDictUnicodeEntry *entries = DK_UNICODE_ENTRIES(globals_keys); PyObject *res_o = FT_ATOMIC_LOAD_PTR_RELAXED(entries[index].me_value); DEAD(globals_keys); @@ -1653,10 +1735,9 @@ dummy_func( res = PyStackRef_FromPyObjectSteal(res_o); #endif STAT_INC(LOAD_GLOBAL, hit); - null = PyStackRef_NULL; } - op(_LOAD_GLOBAL_BUILTINS_FROM_KEYS, (index/1, builtins_keys: PyDictKeysObject* -- res, null if (oparg & 1))) { + op(_LOAD_GLOBAL_BUILTINS_FROM_KEYS, (index/1, builtins_keys: PyDictKeysObject* -- res)) { PyDictUnicodeEntry *entries = DK_UNICODE_ENTRIES(builtins_keys); PyObject *res_o = FT_ATOMIC_LOAD_PTR_RELAXED(entries[index].me_value); DEAD(builtins_keys); @@ -1670,20 +1751,21 @@ dummy_func( res = PyStackRef_FromPyObjectSteal(res_o); #endif STAT_INC(LOAD_GLOBAL, hit); - null = PyStackRef_NULL; } macro(LOAD_GLOBAL_MODULE) = unused/1 + // Skip over the counter _GUARD_GLOBALS_VERSION_PUSH_KEYS + unused/1 + // Skip over the builtins version - _LOAD_GLOBAL_MODULE_FROM_KEYS; + _LOAD_GLOBAL_MODULE_FROM_KEYS + + _PUSH_NULL_CONDITIONAL; macro(LOAD_GLOBAL_BUILTIN) = unused/1 + // Skip over the counter _GUARD_GLOBALS_VERSION + _GUARD_BUILTINS_VERSION_PUSH_KEYS + - _LOAD_GLOBAL_BUILTINS_FROM_KEYS; + _LOAD_GLOBAL_BUILTINS_FROM_KEYS + + _PUSH_NULL_CONDITIONAL; inst(DELETE_FAST, (--)) { _PyStackRef v = GETLOCAL(oparg); @@ -1787,16 +1869,20 @@ dummy_func( } inst(BUILD_TUPLE, (values[oparg] -- tup)) { - PyObject *tup_o = _PyTuple_FromStackRefSteal(values, oparg); + PyObject *tup_o = _PyTuple_FromStackRefStealOnSuccess(values, oparg); + if (tup_o == NULL) { + ERROR_NO_POP(); + } INPUTS_DEAD(); - ERROR_IF(tup_o == NULL, error); tup = PyStackRef_FromPyObjectSteal(tup_o); } inst(BUILD_LIST, (values[oparg] -- list)) { - PyObject *list_o = _PyList_FromStackRefSteal(values, oparg); + PyObject *list_o = _PyList_FromStackRefStealOnSuccess(values, oparg); + if (list_o == NULL) { + ERROR_NO_POP(); + } INPUTS_DEAD(); - ERROR_IF(list_o == NULL, error); list = PyStackRef_FromPyObjectSteal(list_o); } @@ -1840,9 +1926,8 @@ dummy_func( if (err == 0) { err = PySet_Add(set_o, PyStackRef_AsPyObjectBorrow(values[i])); } - PyStackRef_CLOSE(values[i]); } - DEAD(values); + DECREF_INPUTS(); if (err != 0) { Py_DECREF(set_o); ERROR_IF(true, error); @@ -1959,7 +2044,7 @@ dummy_func( #endif /* ENABLE_SPECIALIZATION_FT */ } - tier1 
op(_LOAD_SUPER_ATTR, (global_super_st, class_st, self_st -- attr, null if (oparg & 1))) { + tier1 op(_LOAD_SUPER_ATTR, (global_super_st, class_st, self_st -- attr)) { PyObject *global_super = PyStackRef_AsPyObjectBorrow(global_super_st); PyObject *class = PyStackRef_AsPyObjectBorrow(class_st); PyObject *self = PyStackRef_AsPyObjectBorrow(self_st); @@ -2001,12 +2086,11 @@ dummy_func( Py_DECREF(super); ERROR_IF(attr_o == NULL, error); attr = PyStackRef_FromPyObjectSteal(attr_o); - null = PyStackRef_NULL; } - macro(LOAD_SUPER_ATTR) = _SPECIALIZE_LOAD_SUPER_ATTR + _LOAD_SUPER_ATTR; + macro(LOAD_SUPER_ATTR) = _SPECIALIZE_LOAD_SUPER_ATTR + _LOAD_SUPER_ATTR + _PUSH_NULL_CONDITIONAL; - inst(LOAD_SUPER_ATTR_ATTR, (unused/1, global_super_st, class_st, self_st -- attr_st, unused if (0))) { + inst(LOAD_SUPER_ATTR_ATTR, (unused/1, global_super_st, class_st, self_st -- attr_st)) { PyObject *global_super = PyStackRef_AsPyObjectBorrow(global_super_st); PyObject *class = PyStackRef_AsPyObjectBorrow(class_st); PyObject *self = PyStackRef_AsPyObjectBorrow(self_st); @@ -2036,11 +2120,8 @@ dummy_func( int method_found = 0; PyObject *attr_o = _PySuper_Lookup(cls, self, name, Py_TYPE(self)->tp_getattro == PyObject_GenericGetAttr ? &method_found : NULL); - PyStackRef_CLOSE(global_super_st); - PyStackRef_CLOSE(class_st); if (attr_o == NULL) { - PyStackRef_CLOSE(self_st); - ERROR_IF(true, error); + ERROR_NO_POP(); } if (method_found) { self_or_null = self_st; // transfer ownership @@ -2049,6 +2130,7 @@ dummy_func( PyStackRef_CLOSE(self_st); self_or_null = PyStackRef_NULL; } + DECREF_INPUTS(); attr = PyStackRef_FromPyObjectSteal(attr_o); } @@ -2082,7 +2164,7 @@ dummy_func( #endif /* ENABLE_SPECIALIZATION_FT */ } - op(_LOAD_ATTR, (owner -- attr, self_or_null if (oparg & 1))) { + op(_LOAD_ATTR, (owner -- attr, self_or_null[oparg&1])) { PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 1); PyObject *attr_o; if (oparg & 1) { @@ -2095,7 +2177,7 @@ dummy_func( meth | self | arg1 | ... 
| argN */ assert(attr_o != NULL); // No errors on this branch - self_or_null = owner; // Transfer ownership + self_or_null[0] = owner; // Transfer ownership DEAD(owner); } else { @@ -2107,7 +2189,7 @@ dummy_func( */ DECREF_INPUTS(); ERROR_IF(attr_o == NULL, error); - self_or_null = PyStackRef_NULL; + self_or_null[0] = PyStackRef_NULL; } } else { @@ -2115,12 +2197,11 @@ dummy_func( attr_o = PyObject_GetAttr(PyStackRef_AsPyObjectBorrow(owner), name); DECREF_INPUTS(); ERROR_IF(attr_o == NULL, error); - /* We need to define self_or_null on all paths */ - self_or_null = PyStackRef_NULL; } attr = PyStackRef_FromPyObjectSteal(attr_o); } + macro(LOAD_ATTR) = _SPECIALIZE_LOAD_ATTR + unused/8 + @@ -2147,19 +2228,23 @@ dummy_func( PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner); assert(Py_TYPE(owner_o)->tp_dictoffset < 0); assert(Py_TYPE(owner_o)->tp_flags & Py_TPFLAGS_INLINE_VALUES); - DEOPT_IF(!_PyObject_InlineValues(owner_o)->valid); + DEOPT_IF(!FT_ATOMIC_LOAD_UINT8(_PyObject_InlineValues(owner_o)->valid)); } - split op(_LOAD_ATTR_INSTANCE_VALUE, (offset/1, owner -- attr, null if (oparg & 1))) { + op(_LOAD_ATTR_INSTANCE_VALUE, (offset/1, owner -- attr)) { PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner); PyObject **value_ptr = (PyObject**)(((char *)owner_o) + offset); - PyObject *attr_o = *value_ptr; + PyObject *attr_o = FT_ATOMIC_LOAD_PTR_ACQUIRE(*value_ptr); DEOPT_IF(attr_o == NULL); + #ifdef Py_GIL_DISABLED + if (!_Py_TryIncrefCompareStackRef(value_ptr, attr_o, &attr)) { + DEOPT_IF(true); + } + #else + attr = PyStackRef_FromPyObjectNew(attr_o); + #endif STAT_INC(LOAD_ATTR, hit); - Py_INCREF(attr_o); - null = PyStackRef_NULL; - attr = PyStackRef_FromPyObjectSteal(attr_o); - DECREF_INPUTS(); + PyStackRef_CLOSE(owner); } macro(LOAD_ATTR_INSTANCE_VALUE) = @@ -2167,7 +2252,8 @@ dummy_func( _GUARD_TYPE_VERSION + _CHECK_MANAGED_OBJECT_HAS_VALUES + _LOAD_ATTR_INSTANCE_VALUE + - unused/5; // Skip over rest of cache + unused/5 + + _PUSH_NULL_CONDITIONAL; op(_CHECK_ATTR_MODULE_PUSH_KEYS, (dict_version/2, owner -- owner, mod_keys: PyDictKeysObject *)) { PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner); @@ -2179,14 +2265,13 @@ dummy_func( mod_keys = keys; } - op(_LOAD_ATTR_MODULE_FROM_KEYS, (index/1, owner, mod_keys: PyDictKeysObject * -- attr, null if (oparg & 1))) { + op(_LOAD_ATTR_MODULE_FROM_KEYS, (index/1, owner, mod_keys: PyDictKeysObject * -- attr)) { assert(mod_keys->dk_kind == DICT_KEYS_UNICODE); assert(index < FT_ATOMIC_LOAD_SSIZE_RELAXED(mod_keys->dk_nentries)); PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(mod_keys) + index; PyObject *attr_o = FT_ATOMIC_LOAD_PTR_RELAXED(ep->me_value); - DEAD(mod_keys); // Clear mod_keys from stack in case we need to deopt - POP_DEAD_INPUTS(); + POP_INPUT(mod_keys); DEOPT_IF(attr_o == NULL); #ifdef Py_GIL_DISABLED int increfed = _Py_TryIncrefCompareStackRef(&ep->me_value, attr_o, &attr); @@ -2198,7 +2283,6 @@ dummy_func( attr = PyStackRef_FromPyObjectSteal(attr_o); #endif STAT_INC(LOAD_ATTR, hit); - null = PyStackRef_NULL; PyStackRef_CLOSE(owner); } @@ -2206,33 +2290,53 @@ dummy_func( unused/1 + _CHECK_ATTR_MODULE_PUSH_KEYS + _LOAD_ATTR_MODULE_FROM_KEYS + - unused/5; + unused/5 + + _PUSH_NULL_CONDITIONAL; - op(_CHECK_ATTR_WITH_HINT, (owner -- owner)) { + op(_CHECK_ATTR_WITH_HINT, (owner -- owner, dict: PyDictObject *)) { PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner); assert(Py_TYPE(owner_o)->tp_flags & Py_TPFLAGS_MANAGED_DICT); - PyDictObject *dict = _PyObject_GetManagedDict(owner_o); - EXIT_IF(dict == NULL); - 
assert(PyDict_CheckExact((PyObject *)dict)); + PyDictObject *dict_o = _PyObject_GetManagedDict(owner_o); + EXIT_IF(dict_o == NULL); + assert(PyDict_CheckExact((PyObject *)dict_o)); + dict = dict_o; } - op(_LOAD_ATTR_WITH_HINT, (hint/1, owner -- attr, null if (oparg & 1))) { - PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner); + op(_LOAD_ATTR_WITH_HINT, (hint/1, owner, dict: PyDictObject * -- attr)) { PyObject *attr_o; + if (!LOCK_OBJECT(dict)) { + POP_INPUT(dict); + DEOPT_IF(true); + } - PyDictObject *dict = _PyObject_GetManagedDict(owner_o); - DEOPT_IF(hint >= (size_t)dict->ma_keys->dk_nentries); + if (hint >= (size_t)dict->ma_keys->dk_nentries) { + UNLOCK_OBJECT(dict); + POP_INPUT(dict); + DEOPT_IF(true); + } PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1); - DEOPT_IF(!DK_IS_UNICODE(dict->ma_keys)); + if (dict->ma_keys->dk_kind != DICT_KEYS_UNICODE) { + UNLOCK_OBJECT(dict); + POP_INPUT(dict); + DEOPT_IF(true); + } PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + hint; - DEOPT_IF(ep->me_key != name); + if (ep->me_key != name) { + UNLOCK_OBJECT(dict); + POP_INPUT(dict); + DEOPT_IF(true); + } attr_o = ep->me_value; - DEOPT_IF(attr_o == NULL); + if (attr_o == NULL) { + UNLOCK_OBJECT(dict); + POP_INPUT(dict); + DEOPT_IF(true); + } STAT_INC(LOAD_ATTR, hit); - Py_INCREF(attr_o); - attr = PyStackRef_FromPyObjectSteal(attr_o); - null = PyStackRef_NULL; + attr = PyStackRef_FromPyObjectNew(attr_o); + UNLOCK_OBJECT(dict); + DEAD(dict); DECREF_INPUTS(); } @@ -2241,17 +2345,22 @@ dummy_func( _GUARD_TYPE_VERSION + _CHECK_ATTR_WITH_HINT + _LOAD_ATTR_WITH_HINT + - unused/5; + unused/5 + + _PUSH_NULL_CONDITIONAL; - split op(_LOAD_ATTR_SLOT, (index/1, owner -- attr, null if (oparg & 1))) { + op(_LOAD_ATTR_SLOT, (index/1, owner -- attr)) { PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner); - char *addr = (char *)owner_o + index; - PyObject *attr_o = *(PyObject **)addr; + PyObject **addr = (PyObject **)((char *)owner_o + index); + PyObject *attr_o = FT_ATOMIC_LOAD_PTR(*addr); DEOPT_IF(attr_o == NULL); - STAT_INC(LOAD_ATTR, hit); - null = PyStackRef_NULL; + #ifdef Py_GIL_DISABLED + int increfed = _Py_TryIncrefCompareStackRef(addr, attr_o, &attr); + DEOPT_IF(!increfed); + #else attr = PyStackRef_FromPyObjectNew(attr_o); + #endif + STAT_INC(LOAD_ATTR, hit); DECREF_INPUTS(); } @@ -2259,21 +2368,21 @@ dummy_func( unused/1 + _GUARD_TYPE_VERSION + _LOAD_ATTR_SLOT + // NOTE: This action may also deopt - unused/5; + unused/5 + + _PUSH_NULL_CONDITIONAL; op(_CHECK_ATTR_CLASS, (type_version/2, owner -- owner)) { PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner); EXIT_IF(!PyType_Check(owner_o)); assert(type_version != 0); - EXIT_IF(((PyTypeObject *)owner_o)->tp_version_tag != type_version); + EXIT_IF(FT_ATOMIC_LOAD_UINT_RELAXED(((PyTypeObject *)owner_o)->tp_version_tag) != type_version); } - split op(_LOAD_ATTR_CLASS, (descr/4, owner -- attr, null if (oparg & 1))) { + op(_LOAD_ATTR_CLASS, (descr/4, owner -- attr)) { STAT_INC(LOAD_ATTR, hit); assert(descr != NULL); attr = PyStackRef_FromPyObjectNew(descr); - null = PyStackRef_NULL; DECREF_INPUTS(); } @@ -2281,13 +2390,15 @@ dummy_func( unused/1 + _CHECK_ATTR_CLASS + unused/2 + - _LOAD_ATTR_CLASS; + _LOAD_ATTR_CLASS + + _PUSH_NULL_CONDITIONAL; macro(LOAD_ATTR_CLASS_WITH_METACLASS_CHECK) = unused/1 + _CHECK_ATTR_CLASS + _GUARD_TYPE_VERSION + - _LOAD_ATTR_CLASS; + _LOAD_ATTR_CLASS + + _PUSH_NULL_CONDITIONAL; op(_LOAD_ATTR_PROPERTY_FRAME, (fget/4, owner -- new_frame: _PyInterpreterFrame *)) { assert((oparg & 1) == 0); @@ -2313,14 +2424,14 @@ 
dummy_func( _SAVE_RETURN_OFFSET + _PUSH_FRAME; - inst(LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN, (unused/1, type_version/2, func_version/2, getattribute/4, owner -- unused, unused if (0))) { + inst(LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN, (unused/1, type_version/2, func_version/2, getattribute/4, owner -- unused)) { PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner); assert((oparg & 1) == 0); DEOPT_IF(tstate->interp->eval_frame); PyTypeObject *cls = Py_TYPE(owner_o); assert(type_version != 0); - DEOPT_IF(cls->tp_version_tag != type_version); + DEOPT_IF(FT_ATOMIC_LOAD_UINT_RELAXED(cls->tp_version_tag) != type_version); assert(Py_IS_TYPE(getattribute, &PyFunction_Type)); PyFunctionObject *f = (PyFunctionObject *)getattribute; assert(func_version != 0); @@ -2368,8 +2479,8 @@ dummy_func( _PyDictValues_AddToInsertionOrder(values, index); } UNLOCK_OBJECT(owner_o); - Py_XDECREF(old_value); PyStackRef_CLOSE(owner); + Py_XDECREF(old_value); } macro(STORE_ATTR_INSTANCE_VALUE) = @@ -2413,9 +2524,9 @@ dummy_func( // old_value should be DECREFed after GC track checking is done, if not, it could raise a segmentation fault, // when dict only holds the strong reference to value in ep->me_value. - Py_XDECREF(old_value); STAT_INC(STORE_ATTR, hit); PyStackRef_CLOSE(owner); + Py_XDECREF(old_value); } macro(STORE_ATTR_WITH_HINT) = @@ -2432,8 +2543,8 @@ dummy_func( PyObject *old_value = *(PyObject **)addr; FT_ATOMIC_STORE_PTR_RELEASE(*(PyObject **)addr, PyStackRef_AsPyObjectSteal(value)); UNLOCK_OBJECT(owner_o); - Py_XDECREF(old_value); PyStackRef_CLOSE(owner); + Py_XDECREF(old_value); } macro(STORE_ATTR_SLOT) = @@ -2448,7 +2559,7 @@ dummy_func( }; specializing op(_SPECIALIZE_COMPARE_OP, (counter/1, left, right -- left, right)) { - #if ENABLE_SPECIALIZATION + #if ENABLE_SPECIALIZATION_FT if (ADAPTIVE_COUNTER_TRIGGERS(counter)) { next_instr = this_instr; _Py_Specialize_CompareOp(left, right, next_instr, oparg); @@ -2456,7 +2567,7 @@ dummy_func( } OPCODE_DEFERRED_INC(COMPARE_OP); ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter); - #endif /* ENABLE_SPECIALIZATION */ + #endif /* ENABLE_SPECIALIZATION_FT */ } op(_COMPARE_OP, (left, right -- res)) { @@ -2618,7 +2729,7 @@ dummy_func( PyObject *match_o = NULL; PyObject *rest_o = NULL; - int res = _PyEval_ExceptionGroupMatch(exc_value, match_type, + int res = _PyEval_ExceptionGroupMatch(frame, exc_value, match_type, &match_o, &rest_o); DECREF_INPUTS(); ERROR_IF(res < 0, error); @@ -2670,13 +2781,26 @@ dummy_func( JUMPBY(oparg); } - tier1 op(_JUMP_BACKWARD, (the_counter/1 --)) { - assert(oparg <= INSTR_OFFSET()); - JUMPBY(-oparg); - #ifdef _Py_TIER2 - #if ENABLE_SPECIALIZATION + family(JUMP_BACKWARD, 1) = { + JUMP_BACKWARD_NO_JIT, + JUMP_BACKWARD_JIT, + }; + + tier1 op(_SPECIALIZE_JUMP_BACKWARD, (--)) { + #if ENABLE_SPECIALIZATION + if (this_instr->op.code == JUMP_BACKWARD) { + this_instr->op.code = tstate->interp->jit ? 
JUMP_BACKWARD_JIT : JUMP_BACKWARD_NO_JIT; + // Need to re-dispatch so the warmup counter isn't off by one: + next_instr = this_instr; + DISPATCH_SAME_OPARG(); + } + #endif + } + + tier1 op(_JIT, (--)) { + #ifdef _Py_TIER2 _Py_BackoffCounter counter = this_instr[1].counter; - if (backoff_counter_triggers(counter) && this_instr->op.code == JUMP_BACKWARD) { + if (backoff_counter_triggers(counter) && this_instr->op.code == JUMP_BACKWARD_JIT) { _Py_CODEUNIT *start = this_instr; /* Back up over EXTENDED_ARGs so optimizer sees the whole instruction */ while (oparg > 255) { @@ -2699,13 +2823,25 @@ dummy_func( else { ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter); } - #endif /* ENABLE_SPECIALIZATION */ - #endif /* _Py_TIER2 */ + #endif } macro(JUMP_BACKWARD) = + unused/1 + + _SPECIALIZE_JUMP_BACKWARD + + _CHECK_PERIODIC + + JUMP_BACKWARD_NO_INTERRUPT; + + macro(JUMP_BACKWARD_NO_JIT) = + unused/1 + + _CHECK_PERIODIC + + JUMP_BACKWARD_NO_INTERRUPT; + + macro(JUMP_BACKWARD_JIT) = + unused/1 + _CHECK_PERIODIC + - _JUMP_BACKWARD; + JUMP_BACKWARD_NO_INTERRUPT + + _JIT; pseudo(JUMP, (--)) = { JUMP_FORWARD, @@ -2794,6 +2930,7 @@ dummy_func( * generator or coroutine, so we deliberately do not check it here. * (see bpo-30039). */ + assert(oparg <= INSTR_OFFSET()); JUMPBY(-oparg); } @@ -2874,7 +3011,6 @@ dummy_func( else { /* `iterable` is not a generator. */ PyObject *iter_o = PyObject_GetIter(iterable_o); - DEAD(iterable); if (iter_o == NULL) { ERROR_NO_POP(); } @@ -2924,10 +3060,8 @@ dummy_func( /* iterator ended normally */ assert(next_instr[oparg].op.code == END_FOR || next_instr[oparg].op.code == INSTRUMENTED_END_FOR); - PyStackRef_CLOSE(iter); - STACK_SHRINK(1); - /* Jump forward oparg, then skip following END_FOR and POP_TOP instruction */ - JUMPBY(oparg + 2); + /* Jump forward oparg, then skip following END_FOR */ + JUMPBY(oparg + 1); DISPATCH(); } next = PyStackRef_FromPyObjectSteal(next_o); @@ -2957,12 +3091,14 @@ dummy_func( macro(FOR_ITER) = _SPECIALIZE_FOR_ITER + _FOR_ITER; + inst(INSTRUMENTED_FOR_ITER, (unused/1 -- )) { _PyStackRef iter_stackref = TOP(); PyObject *iter = PyStackRef_AsPyObjectBorrow(iter_stackref); PyObject *next = (*Py_TYPE(iter)->tp_iternext)(iter); if (next != NULL) { PUSH(PyStackRef_FromPyObjectSteal(next)); + INSTRUMENTED_JUMP(this_instr, next_instr, PY_MONITORING_EVENT_BRANCH_LEFT); } else { if (_PyErr_Occurred(tstate)) { @@ -2976,14 +3112,12 @@ dummy_func( /* iterator ended normally */ assert(next_instr[oparg].op.code == END_FOR || next_instr[oparg].op.code == INSTRUMENTED_END_FOR); - STACK_SHRINK(1); - PyStackRef_CLOSE(iter_stackref); - /* Skip END_FOR and POP_TOP */ - _Py_CODEUNIT *target = next_instr + oparg + 2; - INSTRUMENTED_JUMP(this_instr, target, PY_MONITORING_EVENT_BRANCH_RIGHT); + /* Skip END_FOR */ + JUMPBY(oparg + 1); } } + op(_ITER_CHECK_LIST, (iter -- iter)) { EXIT_IF(Py_TYPE(PyStackRef_AsPyObjectBorrow(iter)) != &PyListIter_Type); } @@ -3002,10 +3136,8 @@ dummy_func( Py_DECREF(seq); } #endif - PyStackRef_CLOSE(iter); - STACK_SHRINK(1); - /* Jump forward oparg, then skip following END_FOR and POP_TOP instructions */ - JUMPBY(oparg + 2); + /* Jump forward oparg, then skip following END_FOR instruction */ + JUMPBY(oparg + 1); DISPATCH(); } } @@ -3054,10 +3186,8 @@ dummy_func( it->it_seq = NULL; Py_DECREF(seq); } - PyStackRef_CLOSE(iter); - STACK_SHRINK(1); - /* Jump forward oparg, then skip following END_FOR and POP_TOP instructions */ - JUMPBY(oparg + 2); + /* Jump forward oparg, then skip following END_FOR instruction */ + JUMPBY(oparg + 1); DISPATCH(); } 
} @@ -3098,10 +3228,8 @@ dummy_func( assert(Py_TYPE(r) == &PyRangeIter_Type); STAT_INC(FOR_ITER, hit); if (r->len <= 0) { - STACK_SHRINK(1); - PyStackRef_CLOSE(iter); - // Jump over END_FOR and POP_TOP instructions. - JUMPBY(oparg + 2); + // Jump over END_FOR instruction. + JUMPBY(oparg + 1); DISPATCH(); } } @@ -3247,16 +3375,18 @@ dummy_func( op(_GUARD_DORV_VALUES_INST_ATTR_FROM_DICT, (owner -- owner)) { PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner); assert(Py_TYPE(owner_o)->tp_flags & Py_TPFLAGS_INLINE_VALUES); - DEOPT_IF(!_PyObject_InlineValues(owner_o)->valid); + PyDictValues *ivs = _PyObject_InlineValues(owner_o); + DEOPT_IF(!FT_ATOMIC_LOAD_UINT8(ivs->valid)); } op(_GUARD_KEYS_VERSION, (keys_version/2, owner -- owner)) { PyTypeObject *owner_cls = Py_TYPE(PyStackRef_AsPyObjectBorrow(owner)); PyHeapTypeObject *owner_heap_type = (PyHeapTypeObject *)owner_cls; - DEOPT_IF(owner_heap_type->ht_cached_keys->dk_version != keys_version); + PyDictKeysObject *keys = owner_heap_type->ht_cached_keys; + DEOPT_IF(FT_ATOMIC_LOAD_UINT32_RELAXED(keys->dk_version) != keys_version); } - split op(_LOAD_ATTR_METHOD_WITH_VALUES, (descr/4, owner -- attr, self if (1))) { + op(_LOAD_ATTR_METHOD_WITH_VALUES, (descr/4, owner -- attr, self)) { assert(oparg & 1); /* Cached method object */ STAT_INC(LOAD_ATTR, hit); @@ -3274,7 +3404,7 @@ dummy_func( _GUARD_KEYS_VERSION + _LOAD_ATTR_METHOD_WITH_VALUES; - op(_LOAD_ATTR_METHOD_NO_DICT, (descr/4, owner -- attr, self if (1))) { + op(_LOAD_ATTR_METHOD_NO_DICT, (descr/4, owner -- attr, self)) { assert(oparg & 1); assert(Py_TYPE(PyStackRef_AsPyObjectBorrow(owner))->tp_dictoffset == 0); STAT_INC(LOAD_ATTR, hit); @@ -3291,7 +3421,7 @@ dummy_func( unused/2 + _LOAD_ATTR_METHOD_NO_DICT; - op(_LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES, (descr/4, owner -- attr, unused if (0))) { + op(_LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES, (descr/4, owner -- attr)) { assert((oparg & 1) == 0); STAT_INC(LOAD_ATTR, hit); assert(descr != NULL); @@ -3306,7 +3436,7 @@ dummy_func( _GUARD_KEYS_VERSION + _LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES; - op(_LOAD_ATTR_NONDESCRIPTOR_NO_DICT, (descr/4, owner -- attr, unused if (0))) { + op(_LOAD_ATTR_NONDESCRIPTOR_NO_DICT, (descr/4, owner -- attr)) { assert((oparg & 1) == 0); assert(Py_TYPE(PyStackRef_AsPyObjectBorrow(owner))->tp_dictoffset == 0); STAT_INC(LOAD_ATTR, hit); @@ -3323,12 +3453,12 @@ dummy_func( op(_CHECK_ATTR_METHOD_LAZY_DICT, (dictoffset/1, owner -- owner)) { char *ptr = ((char *)PyStackRef_AsPyObjectBorrow(owner)) + MANAGED_DICT_OFFSET + dictoffset; - PyObject *dict = *(PyObject **)ptr; + PyObject *dict = FT_ATOMIC_LOAD_PTR_ACQUIRE(*(PyObject **)ptr); /* This object has a __dict__, just not yet created */ DEOPT_IF(dict != NULL); } - op(_LOAD_ATTR_METHOD_LAZY_DICT, (descr/4, owner -- attr, self if (1))) { + op(_LOAD_ATTR_METHOD_LAZY_DICT, (descr/4, owner -- attr, self)) { assert(oparg & 1); STAT_INC(LOAD_ATTR, hit); assert(descr != NULL); @@ -3400,8 +3530,9 @@ dummy_func( // oparg counts all of the args, but *not* self: int total_args = oparg; + _PyStackRef *arguments = args; if (!PyStackRef_IsNull(self_or_null[0])) { - args--; + arguments--; total_args++; } // Check if the call can be inlined or not @@ -3413,7 +3544,7 @@ dummy_func( PyObject *locals = code_flags & CO_OPTIMIZED ? 
NULL : Py_NewRef(PyFunction_GET_GLOBALS(callable_o)); _PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit( tstate, callable[0], locals, - args, total_args, NULL, frame + arguments, total_args, NULL, frame ); // Manipulate stack directly since we leave using DISPATCH_INLINED(). SYNC_SP(); @@ -3426,13 +3557,9 @@ dummy_func( DISPATCH_INLINED(new_frame); } /* Callable is not a normal Python function */ - STACKREFS_TO_PYOBJECTS(args, total_args, args_o); + STACKREFS_TO_PYOBJECTS(arguments, total_args, args_o); if (CONVERSION_FAILED(args_o)) { - PyStackRef_CLOSE(callable[0]); - for (int i = 0; i < total_args; i++) { - PyStackRef_CLOSE(args[i]); - } - DEAD(self_or_null); + DECREF_INPUTS(); ERROR_IF(true, error); } PyObject *res_o = PyObject_Vectorcall( @@ -3442,7 +3569,7 @@ dummy_func( STACKREFS_TO_PYOBJECTS_CLEANUP(args_o); if (opcode == INSTRUMENTED_CALL) { PyObject *arg = total_args == 0 ? - &_PyInstrumentation_MISSING : PyStackRef_AsPyObjectBorrow(args[0]); + &_PyInstrumentation_MISSING : PyStackRef_AsPyObjectBorrow(arguments[0]); if (res_o == NULL) { _Py_call_instrumentation_exc2( tstate, PY_MONITORING_EVENT_C_RAISE, @@ -3458,11 +3585,7 @@ dummy_func( } } assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); - PyStackRef_CLOSE(callable[0]); - for (int i = 0; i < total_args; i++) { - PyStackRef_CLOSE(args[i]); - } - DEAD(self_or_null); + DECREF_INPUTS(); ERROR_IF(res_o == NULL, error); res = PyStackRef_FromPyObjectSteal(res_o); } @@ -3547,15 +3670,14 @@ dummy_func( EXIT_IF(!PyStackRef_IsNull(null[0])); } - op(_EXPAND_METHOD, (callable[1], null[1], unused[oparg] -- method[1], self[1], unused[oparg])) { + op(_EXPAND_METHOD, (callable[1], self_or_null[1], unused[oparg] -- callable[1], self_or_null[1], unused[oparg])) { PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); - assert(PyStackRef_IsNull(null[0])); - DEAD(null); + assert(PyStackRef_IsNull(self_or_null[0])); assert(Py_TYPE(callable_o) == &PyMethod_Type); - self[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_self); + self_or_null[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_self); _PyStackRef temp = callable[0]; - method[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_func); - assert(PyStackRef_FunctionCheck(method[0])); + callable[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_func); + assert(PyStackRef_FunctionCheck(callable[0])); PyStackRef_CLOSE(temp); } @@ -3582,12 +3704,13 @@ dummy_func( PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); int total_args = oparg; + _PyStackRef *arguments = args; if (!PyStackRef_IsNull(self_or_null[0])) { - args--; + arguments--; total_args++; } /* Callable is not a normal Python function */ - STACKREFS_TO_PYOBJECTS(args, total_args, args_o); + STACKREFS_TO_PYOBJECTS(arguments, total_args, args_o); if (CONVERSION_FAILED(args_o)) { DECREF_INPUTS(); ERROR_IF(true, error); @@ -3598,11 +3721,7 @@ dummy_func( NULL); STACKREFS_TO_PYOBJECTS_CLEANUP(args_o); assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); - PyStackRef_CLOSE(callable[0]); - for (int i = 0; i < total_args; i++) { - PyStackRef_CLOSE(args[i]); - } - DEAD(self_or_null); + DECREF_INPUTS(); ERROR_IF(res_o == NULL, error); res = PyStackRef_FromPyObjectSteal(res_o); } @@ -3619,13 +3738,13 @@ dummy_func( EXIT_IF(Py_TYPE(PyStackRef_AsPyObjectBorrow(callable[0])) != &PyMethod_Type); } - op(_INIT_CALL_BOUND_METHOD_EXACT_ARGS, (callable[1], null[1], unused[oparg] -- func[1], self[1], unused[oparg])) { - DEAD(null); + 
op(_INIT_CALL_BOUND_METHOD_EXACT_ARGS, (callable[1], self_or_null[1], unused[oparg] -- callable[1], self_or_null[1], unused[oparg])) { + assert(PyStackRef_IsNull(self_or_null[0])); PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); STAT_INC(CALL, hit); - self[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_self); + self_or_null[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_self); _PyStackRef temp = callable[0]; - func[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_func); + callable[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_func); PyStackRef_CLOSE(temp); } @@ -3827,29 +3946,24 @@ dummy_func( op(_CALL_BUILTIN_CLASS, (callable[1], self_or_null[1], args[oparg] -- res)) { PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); - + DEOPT_IF(!PyType_Check(callable_o)); + PyTypeObject *tp = (PyTypeObject *)callable_o; int total_args = oparg; + _PyStackRef *arguments = args; if (!PyStackRef_IsNull(self_or_null[0])) { - args--; + arguments--; total_args++; } - DEAD(self_or_null); - DEOPT_IF(!PyType_Check(callable_o)); - PyTypeObject *tp = (PyTypeObject *)callable_o; DEOPT_IF(tp->tp_vectorcall == NULL); STAT_INC(CALL, hit); - STACKREFS_TO_PYOBJECTS(args, total_args, args_o); + STACKREFS_TO_PYOBJECTS(arguments, total_args, args_o); if (CONVERSION_FAILED(args_o)) { DECREF_INPUTS(); ERROR_IF(true, error); } PyObject *res_o = tp->tp_vectorcall((PyObject *)tp, args_o, total_args, NULL); STACKREFS_TO_PYOBJECTS_CLEANUP(args_o); - /* Free the arguments. */ - for (int i = 0; i < total_args; i++) { - PyStackRef_CLOSE(args[i]); - } - PyStackRef_CLOSE(callable[0]); + DECREF_INPUTS(); ERROR_IF(res_o == NULL, error); res = PyStackRef_FromPyObjectSteal(res_o); } @@ -3901,17 +4015,17 @@ dummy_func( PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); int total_args = oparg; + _PyStackRef *arguments = args; if (!PyStackRef_IsNull(self_or_null[0])) { - args--; + arguments--; total_args++; } - DEAD(self_or_null); DEOPT_IF(!PyCFunction_CheckExact(callable_o)); DEOPT_IF(PyCFunction_GET_FLAGS(callable_o) != METH_FASTCALL); STAT_INC(CALL, hit); PyCFunction cfunc = PyCFunction_GET_FUNCTION(callable_o); /* res = func(self, args, nargs) */ - STACKREFS_TO_PYOBJECTS(args, total_args, args_o); + STACKREFS_TO_PYOBJECTS(arguments, total_args, args_o); if (CONVERSION_FAILED(args_o)) { DECREF_INPUTS(); ERROR_IF(true, error); @@ -3922,12 +4036,7 @@ dummy_func( total_args); STACKREFS_TO_PYOBJECTS_CLEANUP(args_o); assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); - - /* Free the arguments. 
*/ - for (int i = 0; i < total_args; i++) { - PyStackRef_CLOSE(args[i]); - } - PyStackRef_CLOSE(callable[0]); + DECREF_INPUTS(); ERROR_IF(res_o == NULL, error); res = PyStackRef_FromPyObjectSteal(res_o); } @@ -3943,34 +4052,28 @@ dummy_func( PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); int total_args = oparg; + _PyStackRef *arguments = args; if (!PyStackRef_IsNull(self_or_null[0])) { - args--; + arguments--; total_args++; } DEOPT_IF(!PyCFunction_CheckExact(callable_o)); DEOPT_IF(PyCFunction_GET_FLAGS(callable_o) != (METH_FASTCALL | METH_KEYWORDS)); STAT_INC(CALL, hit); - /* res = func(self, args, nargs, kwnames) */ + /* res = func(self, arguments, nargs, kwnames) */ PyCFunctionFastWithKeywords cfunc = (PyCFunctionFastWithKeywords)(void(*)(void)) PyCFunction_GET_FUNCTION(callable_o); - STACKREFS_TO_PYOBJECTS(args, total_args, args_o); + STACKREFS_TO_PYOBJECTS(arguments, total_args, args_o); if (CONVERSION_FAILED(args_o)) { DECREF_INPUTS(); ERROR_IF(true, error); } PyObject *res_o = cfunc(PyCFunction_GET_SELF(callable_o), args_o, total_args, NULL); STACKREFS_TO_PYOBJECTS_CLEANUP(args_o); - assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); - - /* Free the arguments. */ - for (int i = 0; i < total_args; i++) { - PyStackRef_CLOSE(args[i]); - } - DEAD(self_or_null); - PyStackRef_CLOSE(callable[0]); + DECREF_INPUTS(); ERROR_IF(res_o == NULL, error); res = PyStackRef_FromPyObjectSteal(res_o); } @@ -4005,8 +4108,10 @@ dummy_func( if (res_o == NULL) { GOTO_ERROR(error); } - PyStackRef_CLOSE(callable[0]); PyStackRef_CLOSE(arg_stackref); + DEAD(args); + DEAD(self_or_null); + PyStackRef_CLOSE(callable[0]); res = PyStackRef_FromPyObjectSteal(res_o); } @@ -4015,25 +4120,24 @@ dummy_func( PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); int total_args = oparg; + _PyStackRef *arguments = args; if (!PyStackRef_IsNull(self_or_null[0])) { - args--; + arguments--; total_args++; } DEOPT_IF(total_args != 2); PyInterpreterState *interp = tstate->interp; DEOPT_IF(callable_o != interp->callable_cache.isinstance); STAT_INC(CALL, hit); - _PyStackRef cls_stackref = args[1]; - _PyStackRef inst_stackref = args[0]; + _PyStackRef cls_stackref = arguments[1]; + _PyStackRef inst_stackref = arguments[0]; int retval = PyObject_IsInstance(PyStackRef_AsPyObjectBorrow(inst_stackref), PyStackRef_AsPyObjectBorrow(cls_stackref)); if (retval < 0) { ERROR_NO_POP(); } res = retval ? PyStackRef_True : PyStackRef_False; assert((!PyStackRef_IsNull(res)) ^ (_PyErr_Occurred(tstate) != NULL)); - PyStackRef_CLOSE(inst_stackref); - PyStackRef_CLOSE(cls_stackref); - PyStackRef_CLOSE(callable[0]); + DECREF_INPUTS(); } // This is secretly a super-instruction @@ -4065,8 +4169,9 @@ dummy_func( PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); int total_args = oparg; + _PyStackRef *arguments = args; if (!PyStackRef_IsNull(self_or_null[0])) { - args--; + arguments--; total_args++; } @@ -4077,8 +4182,8 @@ dummy_func( EXIT_IF(meth->ml_flags != METH_O); // CPython promises to check all non-vectorcall function calls. 
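/* [Editor's note -- illustrative sketch, not part of the patch.]
 * Several CALL specializations here replace a hand-written cleanup loop with
 * DECREF_INPUTS(), letting the cases generator derive the cleanup from the
 * declared stack effect.  The removed code was roughly equivalent to the
 * hypothetical helper below (names are invented for illustration only):
 */
static inline void
sketch_close_call_inputs(_PyStackRef *callable, _PyStackRef *self_or_null,
                         _PyStackRef *args, int oparg)
{
    /* Close the callable, the bound self (if any), and every argument. */
    PyStackRef_CLOSE(callable[0]);
    if (!PyStackRef_IsNull(self_or_null[0])) {
        PyStackRef_CLOSE(self_or_null[0]);
    }
    for (int i = 0; i < oparg; i++) {
        PyStackRef_CLOSE(args[i]);
    }
}
/* The new `_PyStackRef *arguments = args;` locals appear to exist so that
 * `args` itself (a declared input) is left untouched for the generated
 * cleanup, with only the local copy being rewound to cover `self`. */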
EXIT_IF(tstate->c_recursion_remaining <= 0); - _PyStackRef arg_stackref = args[1]; - _PyStackRef self_stackref = args[0]; + _PyStackRef arg_stackref = arguments[1]; + _PyStackRef self_stackref = arguments[0]; EXIT_IF(!Py_IS_TYPE(PyStackRef_AsPyObjectBorrow(self_stackref), method->d_common.d_type)); STAT_INC(CALL, hit); @@ -4089,11 +4194,7 @@ dummy_func( PyStackRef_AsPyObjectBorrow(arg_stackref)); _Py_LeaveRecursiveCallTstate(tstate); assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); - PyStackRef_CLOSE(self_stackref); - PyStackRef_CLOSE(arg_stackref); - DEAD(args); - DEAD(self_or_null); - PyStackRef_CLOSE(callable[0]); + DECREF_INPUTS(); ERROR_IF(res_o == NULL, error); res = PyStackRef_FromPyObjectSteal(res_o); } @@ -4108,8 +4209,9 @@ dummy_func( PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); int total_args = oparg; + _PyStackRef *arguments = args; if (!PyStackRef_IsNull(self_or_null[0])) { - args--; + arguments--; total_args++; } PyMethodDescrObject *method = (PyMethodDescrObject *)callable_o; @@ -4117,12 +4219,12 @@ dummy_func( PyMethodDef *meth = method->d_method; EXIT_IF(meth->ml_flags != (METH_FASTCALL|METH_KEYWORDS)); PyTypeObject *d_type = method->d_common.d_type; - PyObject *self = PyStackRef_AsPyObjectBorrow(args[0]); + PyObject *self = PyStackRef_AsPyObjectBorrow(arguments[0]); EXIT_IF(!Py_IS_TYPE(self, d_type)); STAT_INC(CALL, hit); int nargs = total_args - 1; - STACKREFS_TO_PYOBJECTS(args, total_args, args_o); + STACKREFS_TO_PYOBJECTS(arguments, total_args, args_o); if (CONVERSION_FAILED(args_o)) { DECREF_INPUTS(); ERROR_IF(true, error); @@ -4132,13 +4234,7 @@ dummy_func( PyObject *res_o = cfunc(self, (args_o + 1), nargs, NULL); STACKREFS_TO_PYOBJECTS_CLEANUP(args_o); assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); - - /* Free the arguments. */ - for (int i = 0; i < total_args; i++) { - PyStackRef_CLOSE(args[i]); - } - DEAD(self_or_null); - PyStackRef_CLOSE(callable[0]); + DECREF_INPUTS(); ERROR_IF(res_o == NULL, error); res = PyStackRef_FromPyObjectSteal(res_o); } @@ -4192,8 +4288,9 @@ dummy_func( PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); int total_args = oparg; + _PyStackRef *arguments = args; if (!PyStackRef_IsNull(self_or_null[0])) { - args--; + arguments--; total_args++; } PyMethodDescrObject *method = (PyMethodDescrObject *)callable_o; @@ -4201,12 +4298,12 @@ dummy_func( EXIT_IF(!Py_IS_TYPE(method, &PyMethodDescr_Type)); PyMethodDef *meth = method->d_method; EXIT_IF(meth->ml_flags != METH_FASTCALL); - PyObject *self = PyStackRef_AsPyObjectBorrow(args[0]); + PyObject *self = PyStackRef_AsPyObjectBorrow(arguments[0]); EXIT_IF(!Py_IS_TYPE(self, method->d_common.d_type)); STAT_INC(CALL, hit); int nargs = total_args - 1; - STACKREFS_TO_PYOBJECTS(args, total_args, args_o); + STACKREFS_TO_PYOBJECTS(arguments, total_args, args_o); if (CONVERSION_FAILED(args_o)) { DECREF_INPUTS(); ERROR_IF(true, error); @@ -4216,13 +4313,7 @@ dummy_func( PyObject *res_o = cfunc(self, (args_o + 1), nargs); STACKREFS_TO_PYOBJECTS_CLEANUP(args_o); assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); - - /* Clear the stack of the arguments. 
*/ - for (int i = 0; i < total_args; i++) { - PyStackRef_CLOSE(args[i]); - } - DEAD(self_or_null); - PyStackRef_CLOSE(callable[0]); + DECREF_INPUTS(); ERROR_IF(res_o == NULL, error); res = PyStackRef_FromPyObjectSteal(res_o); } @@ -4274,8 +4365,9 @@ dummy_func( // oparg counts all of the args, but *not* self: int total_args = oparg; + _PyStackRef *arguments = args; if (!PyStackRef_IsNull(self_or_null[0])) { - args--; + arguments--; total_args++; } int positional_args = total_args - (int)PyTuple_GET_SIZE(kwnames_o); @@ -4288,7 +4380,7 @@ dummy_func( PyObject *locals = code_flags & CO_OPTIMIZED ? NULL : Py_NewRef(PyFunction_GET_GLOBALS(callable_o)); _PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit( tstate, callable[0], locals, - args, positional_args, kwnames_o, frame + arguments, positional_args, kwnames_o, frame ); PyStackRef_CLOSE(kwnames); // Sync stack explicitly since we leave using DISPATCH_INLINED(). @@ -4303,7 +4395,7 @@ dummy_func( DISPATCH_INLINED(new_frame); } /* Callable is not a normal Python function */ - STACKREFS_TO_PYOBJECTS(args, total_args, args_o); + STACKREFS_TO_PYOBJECTS(arguments, total_args, args_o); if (CONVERSION_FAILED(args_o)) { DECREF_INPUTS(); ERROR_IF(true, error); @@ -4315,7 +4407,7 @@ dummy_func( STACKREFS_TO_PYOBJECTS_CLEANUP(args_o); if (opcode == INSTRUMENTED_CALL_KW) { PyObject *arg = total_args == 0 ? - &_PyInstrumentation_MISSING : PyStackRef_AsPyObjectBorrow(args[0]); + &_PyInstrumentation_MISSING : PyStackRef_AsPyObjectBorrow(arguments[0]); if (res_o == NULL) { _Py_call_instrumentation_exc2( tstate, PY_MONITORING_EVENT_C_RAISE, @@ -4330,13 +4422,7 @@ dummy_func( } } } - PyStackRef_CLOSE(kwnames); - assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); - PyStackRef_CLOSE(callable[0]); - for (int i = 0; i < total_args; i++) { - PyStackRef_CLOSE(args[i]); - } - DEAD(self_or_null); + DECREF_INPUTS(); ERROR_IF(res_o == NULL, error); res = PyStackRef_FromPyObjectSteal(res_o); } @@ -4346,8 +4432,9 @@ dummy_func( // oparg counts all of the args, but *not* self: int total_args = oparg; + _PyStackRef *arguments = args; if (!PyStackRef_IsNull(self_or_null[0])) { - args--; + arguments--; total_args++; } PyObject *kwnames_o = PyStackRef_AsPyObjectBorrow(kwnames); @@ -4357,7 +4444,7 @@ dummy_func( PyObject *locals = code_flags & CO_OPTIMIZED ? 
NULL : Py_NewRef(PyFunction_GET_GLOBALS(callable_o)); _PyInterpreterFrame *temp = _PyEvalFramePushAndInit( tstate, callable[0], locals, - args, positional_args, kwnames_o, frame + arguments, positional_args, kwnames_o, frame ); PyStackRef_CLOSE(kwnames); // The frame has stolen all the arguments from the stack, @@ -4394,15 +4481,14 @@ dummy_func( EXIT_IF(!PyStackRef_IsNull(null[0])); } - op(_EXPAND_METHOD_KW, (callable[1], null[1], unused[oparg], unused -- method[1], self[1], unused[oparg], unused)) { + op(_EXPAND_METHOD_KW, (callable[1], self_or_null[1], unused[oparg], unused -- callable[1], self_or_null[1], unused[oparg], unused)) { + assert(PyStackRef_IsNull(self_or_null[0])); _PyStackRef callable_s = callable[0]; PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable_s); - - assert(PyStackRef_IsNull(null[0])); assert(Py_TYPE(callable_o) == &PyMethod_Type); - self[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_self); - method[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_func); - assert(PyStackRef_FunctionCheck(method[0])); + self_or_null[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_self); + callable[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_func); + assert(PyStackRef_FunctionCheck(callable[0])); PyStackRef_CLOSE(callable_s); } @@ -4448,12 +4534,13 @@ dummy_func( PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); int total_args = oparg; + _PyStackRef *arguments = args; if (!PyStackRef_IsNull(self_or_null[0])) { - args--; + arguments--; total_args++; } /* Callable is not a normal Python function */ - STACKREFS_TO_PYOBJECTS(args, total_args, args_o); + STACKREFS_TO_PYOBJECTS(arguments, total_args, args_o); if (CONVERSION_FAILED(args_o)) { DECREF_INPUTS(); ERROR_IF(true, error); @@ -4467,11 +4554,7 @@ dummy_func( PyStackRef_CLOSE(kwnames); STACKREFS_TO_PYOBJECTS_CLEANUP(args_o); assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); - for (int i = 0; i < total_args; i++) { - PyStackRef_CLOSE(args[i]); - } - DEAD(self_or_null); - PyStackRef_CLOSE(callable[0]); + DECREF_INPUTS(); ERROR_IF(res_o == NULL, error); res = PyStackRef_FromPyObjectSteal(res_o); } @@ -4487,10 +4570,12 @@ dummy_func( GO_TO_INSTRUCTION(CALL_FUNCTION_EX); } - op(_MAKE_CALLARGS_A_TUPLE, (func, unused, callargs, kwargs_in if (oparg & 1) -- func, unused, tuple, kwargs_out if (oparg & 1))) { + op(_MAKE_CALLARGS_A_TUPLE, (func, unused, callargs, kwargs_in -- func, unused, tuple, kwargs_out)) { PyObject *callargs_o = PyStackRef_AsPyObjectBorrow(callargs); if (PyTuple_CheckExact(callargs_o)) { tuple = callargs; + kwargs_out = kwargs_in; + DEAD(kwargs_in); DEAD(callargs); } else { @@ -4502,14 +4587,15 @@ dummy_func( if (tuple_o == NULL) { ERROR_NO_POP(); } + kwargs_out = kwargs_in; + DEAD(kwargs_in); PyStackRef_CLOSE(callargs); tuple = PyStackRef_FromPyObjectSteal(tuple_o); } - kwargs_out = kwargs_in; - DEAD(kwargs_in); } - op(_DO_CALL_FUNCTION_EX, (func_st, unused, callargs_st, kwargs_st if (oparg & 1) -- result)) { + op(_DO_CALL_FUNCTION_EX, (func_st, null, callargs_st, kwargs_st -- result)) { + (void)null; PyObject *func = PyStackRef_AsPyObjectBorrow(func_st); // DICT_MERGE is called before this opcode if there are kwargs. 
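/* [Editor's note -- hedged sketch, not part of the patch.]
 * _MAKE_CALLARGS_A_TUPLE above exists because the eventual PyObject_Call()
 * in _DO_CALL_FUNCTION_EX requires the positional arguments to be a real
 * tuple (the keyword arguments may be a dict or NULL).  A stand-alone caller
 * doing the same normalisation might look like this (function name invented):
 */
static PyObject *
sketch_call_with_star_args(PyObject *func, PyObject *callargs, PyObject *kwargs)
{
    /* PySequence_Tuple() returns a new reference to the object itself when it
     * is already an exact tuple, otherwise it materialises the iterable. */
    PyObject *args = PySequence_Tuple(callargs);
    if (args == NULL) {
        return NULL;
    }
    PyObject *result = PyObject_Call(func, args, kwargs);  /* kwargs may be NULL */
    Py_DECREF(args);
    return result;
}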
@@ -4580,8 +4666,8 @@ dummy_func( result_o = PyObject_Call(func, callargs, kwargs); } PyStackRef_XCLOSE(kwargs_st); - DEAD(kwargs_st); PyStackRef_CLOSE(callargs_st); + DEAD(null); PyStackRef_CLOSE(func_st); ERROR_IF(result_o == NULL, error); result = PyStackRef_FromPyObjectSteal(result_o); @@ -4643,11 +4729,10 @@ dummy_func( LLTRACE_RESUME_FRAME(); } - inst(BUILD_SLICE, (start, stop, step if (oparg == 3) -- slice)) { - PyObject *start_o = PyStackRef_AsPyObjectBorrow(start); - PyObject *stop_o = PyStackRef_AsPyObjectBorrow(stop); - PyObject *step_o = PyStackRef_AsPyObjectBorrow(step); - + inst(BUILD_SLICE, (args[oparg] -- slice)) { + PyObject *start_o = PyStackRef_AsPyObjectBorrow(args[0]); + PyObject *stop_o = PyStackRef_AsPyObjectBorrow(args[1]); + PyObject *step_o = oparg == 3 ? PyStackRef_AsPyObjectBorrow(args[2]) : NULL; PyObject *slice_o = PySlice_New(start_o, stop_o, step_o); DECREF_INPUTS(); ERROR_IF(slice_o == NULL, error); @@ -4682,8 +4767,7 @@ dummy_func( inst(FORMAT_WITH_SPEC, (value, fmt_spec -- res)) { PyObject *res_o = PyObject_Format(PyStackRef_AsPyObjectBorrow(value), PyStackRef_AsPyObjectBorrow(fmt_spec)); - PyStackRef_CLOSE(value); - PyStackRef_CLOSE(fmt_spec); + DECREF_INPUTS(); ERROR_IF(res_o == NULL, error); res = PyStackRef_FromPyObjectSteal(res_o); } @@ -4718,14 +4802,13 @@ dummy_func( res = PyStackRef_FromPyObjectSteal(res_o); } - macro(BINARY_OP) = _SPECIALIZE_BINARY_OP + _BINARY_OP; + macro(BINARY_OP) = _SPECIALIZE_BINARY_OP + unused/4 + _BINARY_OP; - pure inst(SWAP, (bottom_in, unused[oparg-2], top_in -- - top_out, unused[oparg-2], bottom_out)) { - bottom_out = bottom_in; - DEAD(bottom_in); - top_out = top_in; - DEAD(top_in); + pure inst(SWAP, (bottom[1], unused[oparg-2], top[1] -- + bottom[1], unused[oparg-2], top[1])) { + _PyStackRef temp = bottom[0]; + bottom[0] = top[0]; + top[0] = temp; assert(oparg >= 2); } @@ -4733,7 +4816,8 @@ dummy_func( int original_opcode = 0; if (tstate->tracing) { PyCodeObject *code = _PyFrame_GetCode(frame); - original_opcode = code->_co_monitoring->lines[(int)(this_instr - _PyFrame_GetBytecode(frame))].original_opcode; + int index = (int)(this_instr - _PyFrame_GetBytecode(frame)); + original_opcode = code->_co_monitoring->lines->data[index*code->_co_monitoring->lines->bytes_per_entry]; next_instr = this_instr; } else { original_opcode = _Py_call_instrumentation_line( @@ -4779,7 +4863,8 @@ dummy_func( } inst(INSTRUMENTED_NOT_TAKEN, ( -- )) { - INSTRUMENTED_JUMP(this_instr, next_instr, PY_MONITORING_EVENT_BRANCH_LEFT); + (void)this_instr; // INSTRUMENTED_JUMP requires this_instr + INSTRUMENTED_JUMP(prev_instr, next_instr, PY_MONITORING_EVENT_BRANCH_LEFT); } macro(INSTRUMENTED_JUMP_BACKWARD) = @@ -4909,7 +4994,7 @@ dummy_func( _Py_CODEUNIT *target = _PyFrame_GetBytecode(frame) + exit->target; #if defined(Py_DEBUG) && !defined(_Py_JIT) OPT_HIST(trace_uop_execution_counter, trace_run_length_hist); - if (lltrace >= 2) { + if (frame->lltrace >= 2) { printf("SIDE EXIT: [UOp "); _PyUOpPrint(&next_uop[-1]); printf(", exit %u, temp %d, target %d -> %s]\n", @@ -4973,43 +5058,31 @@ dummy_func( value = PyStackRef_FromPyObjectImmortal(ptr); } - tier2 pure op(_LOAD_CONST_INLINE_WITH_NULL, (ptr/4 -- value, null)) { - value = PyStackRef_FromPyObjectNew(ptr); - null = PyStackRef_NULL; - } - - tier2 pure op(_LOAD_CONST_INLINE_BORROW_WITH_NULL, (ptr/4 -- value, null)) { - value = PyStackRef_FromPyObjectImmortal(ptr); - null = PyStackRef_NULL; - } - tier2 op(_CHECK_FUNCTION, (func_version/2 -- )) { assert(PyStackRef_FunctionCheck(frame->f_funcobj)); 
PyFunctionObject *func = (PyFunctionObject *)PyStackRef_AsPyObjectBorrow(frame->f_funcobj); DEOPT_IF(func->func_version != func_version); } - tier2 op(_LOAD_GLOBAL_MODULE, (index/1 -- res, null if (oparg & 1))) { + tier2 op(_LOAD_GLOBAL_MODULE, (index/1 -- res)) { PyDictObject *dict = (PyDictObject *)GLOBALS(); PyDictUnicodeEntry *entries = DK_UNICODE_ENTRIES(dict->ma_keys); PyObject *res_o = entries[index].me_value; DEOPT_IF(res_o == NULL); Py_INCREF(res_o); res = PyStackRef_FromPyObjectSteal(res_o); - null = PyStackRef_NULL; } - tier2 op(_LOAD_GLOBAL_BUILTINS, (index/1 -- res, null if (oparg & 1))) { + tier2 op(_LOAD_GLOBAL_BUILTINS, (index/1 -- res)) { PyDictObject *dict = (PyDictObject *)BUILTINS(); PyDictUnicodeEntry *entries = DK_UNICODE_ENTRIES(dict->ma_keys); PyObject *res_o = entries[index].me_value; DEOPT_IF(res_o == NULL); Py_INCREF(res_o); res = PyStackRef_FromPyObjectSteal(res_o); - null = PyStackRef_NULL; } - tier2 op(_LOAD_ATTR_MODULE, (index/1, owner -- attr, null if (oparg & 1))) { + tier2 op(_LOAD_ATTR_MODULE, (index/1, owner -- attr)) { PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner); PyDictObject *dict = (PyDictObject *)((PyModuleObject *)owner_o)->md_dict; assert(dict->ma_keys->dk_kind == DICT_KEYS_UNICODE); @@ -5020,24 +5093,16 @@ dummy_func( STAT_INC(LOAD_ATTR, hit); Py_INCREF(attr_o); attr = PyStackRef_FromPyObjectSteal(attr_o); - null = PyStackRef_NULL; DECREF_INPUTS(); } - /* Internal -- for testing executors */ - op(_INTERNAL_INCREMENT_OPT_COUNTER, (opt --)) { - _PyCounterOptimizerObject *exe = (_PyCounterOptimizerObject *)PyStackRef_AsPyObjectBorrow(opt); - exe->count++; - DEAD(opt); - } - tier2 op(_DYNAMIC_EXIT, (exit_p/4 --)) { tstate->previous_executor = (PyObject *)current_executor; _PyExitData *exit = (_PyExitData *)exit_p; _Py_CODEUNIT *target = frame->instr_ptr; #if defined(Py_DEBUG) && !defined(_Py_JIT) OPT_HIST(trace_uop_execution_counter, trace_run_length_hist); - if (lltrace >= 2) { + if (frame->lltrace >= 2) { printf("DYNAMIC EXIT: [UOp "); _PyUOpPrint(&next_uop[-1]); printf(", exit %u, temp %d, target %d -> %s]\n", @@ -5103,7 +5168,8 @@ dummy_func( EXIT_TO_TIER1(); } - tier2 op(_ERROR_POP_N, (target/2, unused[oparg] --)) { + tier2 op(_ERROR_POP_N, (target/2 --)) { + assert(oparg == 0); frame->instr_ptr = _PyFrame_GetBytecode(frame) + target; SYNC_SP(); GOTO_UNWIND(); @@ -5121,6 +5187,151 @@ dummy_func( assert(tstate->tracing || eval_breaker == FT_ATOMIC_LOAD_UINTPTR_ACQUIRE(_PyFrame_GetCode(frame)->_co_instrumentation_version)); } + label(pop_4_error) { + STACK_SHRINK(4); + goto error; + } + + label(pop_3_error) { + STACK_SHRINK(3); + goto error; + } + + label(pop_2_error) { + STACK_SHRINK(2); + goto error; + } + + label(pop_1_error) { + STACK_SHRINK(1); + goto error; + } + + label(error) { + /* Double-check exception status. */ +#ifdef NDEBUG + if (!_PyErr_Occurred(tstate)) { + _PyErr_SetString(tstate, PyExc_SystemError, + "error return without exception set"); + } +#else + assert(_PyErr_Occurred(tstate)); +#endif + + /* Log traceback info. 
*/ + assert(frame->owner != FRAME_OWNED_BY_INTERPRETER); + if (!_PyFrame_IsIncomplete(frame)) { + PyFrameObject *f = _PyFrame_GetFrameObject(frame); + if (f != NULL) { + PyTraceBack_Here(f); + } + } + _PyEval_MonitorRaise(tstate, frame, next_instr-1); + goto exception_unwind; + } + + label(exception_unwind) { + /* We can't use frame->instr_ptr here, as RERAISE may have set it */ + int offset = INSTR_OFFSET()-1; + int level, handler, lasti; + if (get_exception_handler(_PyFrame_GetCode(frame), offset, &level, &handler, &lasti) == 0) { + // No handlers, so exit. + assert(_PyErr_Occurred(tstate)); + + /* Pop remaining stack entries. */ + _PyStackRef *stackbase = _PyFrame_Stackbase(frame); + while (stack_pointer > stackbase) { + PyStackRef_XCLOSE(POP()); + } + assert(STACK_LEVEL() == 0); + _PyFrame_SetStackPointer(frame, stack_pointer); + monitor_unwind(tstate, frame, next_instr-1); + goto exit_unwind; + } + + assert(STACK_LEVEL() >= level); + _PyStackRef *new_top = _PyFrame_Stackbase(frame) + level; + while (stack_pointer > new_top) { + PyStackRef_XCLOSE(POP()); + } + if (lasti) { + int frame_lasti = _PyInterpreterFrame_LASTI(frame); + PyObject *lasti = PyLong_FromLong(frame_lasti); + if (lasti == NULL) { + goto exception_unwind; + } + PUSH(PyStackRef_FromPyObjectSteal(lasti)); + } + + /* Make the raw exception data + available to the handler, + so a program can emulate the + Python main loop. */ + PyObject *exc = _PyErr_GetRaisedException(tstate); + PUSH(PyStackRef_FromPyObjectSteal(exc)); + next_instr = _PyFrame_GetBytecode(frame) + handler; + + if (monitor_handled(tstate, frame, next_instr, exc) < 0) { + goto exception_unwind; + } + /* Resume normal execution */ +#ifdef LLTRACE + if (frame->lltrace >= 5) { + lltrace_resume_frame(frame); + } +#endif + DISPATCH(); + } + + label(exit_unwind) { + assert(_PyErr_Occurred(tstate)); + _Py_LeaveRecursiveCallPy(tstate); + assert(frame->owner != FRAME_OWNED_BY_INTERPRETER); + // GH-99729: We need to unlink the frame *before* clearing it: + _PyInterpreterFrame *dying = frame; + frame = tstate->current_frame = dying->previous; + _PyEval_FrameClearAndPop(tstate, dying); + frame->return_offset = 0; + if (frame->owner == FRAME_OWNED_BY_INTERPRETER) { + /* Restore previous frame and exit */ + tstate->current_frame = frame->previous; + tstate->c_recursion_remaining += PY_EVAL_C_STACK_UNITS; + return NULL; + } + next_instr = frame->instr_ptr; + stack_pointer = _PyFrame_GetStackPointer(frame); + goto error; + } + + label(start_frame) { + if (_Py_EnterRecursivePy(tstate)) { + goto exit_unwind; + } + next_instr = frame->instr_ptr; + stack_pointer = _PyFrame_GetStackPointer(frame); + + #ifdef LLTRACE + { + int lltrace = maybe_lltrace_resume_frame(frame, GLOBALS()); + frame->lltrace = lltrace; + if (lltrace < 0) { + goto exit_unwind; + } + } + #endif + + #ifdef Py_DEBUG + /* _PyEval_EvalFrameDefault() must not be called with an exception set, + because it can clear it (directly or indirectly) and so the + caller loses its exception */ + assert(!_PyErr_Occurred(tstate)); + #endif + + DISPATCH(); + } + + + // END BYTECODES // } @@ -5130,7 +5341,6 @@ dummy_func( exit_unwind: handle_eval_breaker: resume_frame: - resume_with_error: start_frame: unbound_local_error: ; diff --git a/Python/ceval.c b/Python/ceval.c index e92a11b16cec81..11518684c136bd 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -27,6 +27,7 @@ #include "pycore_range.h" // _PyRangeIterObject #include "pycore_setobject.h" // _PySet_Update() #include "pycore_sliceobject.h" // _PyBuildSlice_ConsumeRefs 
+#include "pycore_traceback.h" // _PyTraceBack_FromFrame #include "pycore_tuple.h" // _PyTuple_ITEMS() #include "pycore_uop_ids.h" // Uops #include "pycore_pyerrors.h" @@ -178,7 +179,7 @@ lltrace_instruction(_PyInterpreterFrame *frame, int opcode, int oparg) { - if (frame->owner == FRAME_OWNED_BY_CSTACK) { + if (frame->owner >= FRAME_OWNED_BY_INTERPRETER) { return; } dump_stack(frame, stack_pointer); @@ -229,12 +230,12 @@ lltrace_resume_frame(_PyInterpreterFrame *frame) } static int -maybe_lltrace_resume_frame(_PyInterpreterFrame *frame, _PyInterpreterFrame *skip_frame, PyObject *globals) +maybe_lltrace_resume_frame(_PyInterpreterFrame *frame, PyObject *globals) { if (globals == NULL) { return 0; } - if (frame == skip_frame) { + if (frame->owner >= FRAME_OWNED_BY_INTERPRETER) { return 0; } int r = PyDict_Contains(globals, &_Py_ID(__lltrace__)); @@ -294,6 +295,7 @@ void Py_SetRecursionLimit(int new_limit) { PyInterpreterState *interp = _PyInterpreterState_GET(); + _PyEval_StopTheWorld(interp); interp->ceval.recursion_limit = new_limit; _Py_FOR_EACH_TSTATE_BEGIN(interp, p) { int depth = p->py_recursion_limit - p->py_recursion_remaining; @@ -301,6 +303,7 @@ Py_SetRecursionLimit(int new_limit) p->py_recursion_remaining = new_limit - depth; } _Py_FOR_EACH_TSTATE_END(interp); + _PyEval_StartTheWorld(interp); } /* The function _Py_EnterRecursiveCallTstate() only calls _Py_CheckRecursiveCall() @@ -764,23 +767,6 @@ _PyObjectArray_Free(PyObject **array, PyObject **scratch) #define PY_EVAL_C_STACK_UNITS 2 -/* _PyEval_EvalFrameDefault is too large to optimize for speed with PGO on MSVC - when the JIT is enabled or GIL is disabled. Disable that optimization around - this function only. If this is fixed upstream, we should gate this on the - version of MSVC. - */ -#if (defined(_MSC_VER) && \ - defined(_Py_USING_PGO) && \ - (defined(_Py_JIT) || \ - defined(Py_GIL_DISABLED))) -#define DO_NOT_OPTIMIZE_INTERP_LOOP -#endif - -#ifdef DO_NOT_OPTIMIZE_INTERP_LOOP -# pragma optimize("t", off) -/* This setting is reversed below following _PyEval_EvalFrameDefault */ -#endif - PyObject* _Py_HOT_FUNCTION _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int throwflag) { @@ -797,13 +783,19 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int #endif uint8_t opcode; /* Current opcode */ int oparg; /* Current opcode argument, if any */ -#ifdef LLTRACE - int lltrace = 0; -#endif _PyInterpreterFrame entry_frame; + if (_Py_EnterRecursiveCallTstate(tstate, "")) { + assert(frame->owner != FRAME_OWNED_BY_INTERPRETER); + _PyEval_FrameClearAndPop(tstate, frame); + return NULL; + } + /* Local "register" variables. + * These are cached values from the frame and code object. */ + _Py_CODEUNIT *next_instr; + _PyStackRef *stack_pointer; #if defined(Py_DEBUG) && !defined(Py_STACKREF_DEBUG) /* Set these to invalid but identifiable values for debugging. 
*/ @@ -816,220 +808,56 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int entry_frame.f_executable = PyStackRef_None; entry_frame.instr_ptr = (_Py_CODEUNIT *)_Py_INTERPRETER_TRAMPOLINE_INSTRUCTIONS + 1; entry_frame.stackpointer = entry_frame.localsplus; - entry_frame.owner = FRAME_OWNED_BY_CSTACK; + entry_frame.owner = FRAME_OWNED_BY_INTERPRETER; entry_frame.visited = 0; entry_frame.return_offset = 0; +#ifdef LLTRACE + entry_frame.lltrace = 0; +#endif /* Push frame */ entry_frame.previous = tstate->current_frame; frame->previous = &entry_frame; tstate->current_frame = frame; tstate->c_recursion_remaining -= (PY_EVAL_C_STACK_UNITS - 1); - if (_Py_EnterRecursiveCallTstate(tstate, "")) { - tstate->c_recursion_remaining--; - tstate->py_recursion_remaining--; - goto exit_unwind; - } /* support for generator.throw() */ if (throwflag) { if (_Py_EnterRecursivePy(tstate)) { - goto exit_unwind; + goto early_exit; } - /* Because this avoids the RESUME, - * we need to update instrumentation */ #ifdef Py_GIL_DISABLED /* Load thread-local bytecode */ if (frame->tlbc_index != ((_PyThreadStateImpl *)tstate)->tlbc_index) { _Py_CODEUNIT *bytecode = _PyEval_GetExecutableCode(tstate, _PyFrame_GetCode(frame)); if (bytecode == NULL) { - goto error; + goto early_exit; } ptrdiff_t off = frame->instr_ptr - _PyFrame_GetBytecode(frame); frame->tlbc_index = ((_PyThreadStateImpl *)tstate)->tlbc_index; frame->instr_ptr = bytecode + off; } #endif + /* Because this avoids the RESUME, we need to update instrumentation */ _Py_Instrument(_PyFrame_GetCode(frame), tstate->interp); - monitor_throw(tstate, frame, frame->instr_ptr); - /* TO DO -- Monitor throw entry. */ - goto resume_with_error; + next_instr = frame->instr_ptr; + stack_pointer = _PyFrame_GetStackPointer(frame); + monitor_throw(tstate, frame, next_instr); + goto error; } - /* Local "register" variables. - * These are cached values from the frame and code object. */ - _Py_CODEUNIT *next_instr; - _PyStackRef *stack_pointer; - #if defined(_Py_TIER2) && !defined(_Py_JIT) /* Tier 2 interpreter state */ _PyExecutorObject *current_executor = NULL; const _PyUOpInstruction *next_uop = NULL; #endif -start_frame: - if (_Py_EnterRecursivePy(tstate)) { - goto exit_unwind; - } - - next_instr = frame->instr_ptr; -resume_frame: - stack_pointer = _PyFrame_GetStackPointer(frame); - -#ifdef LLTRACE - lltrace = maybe_lltrace_resume_frame(frame, &entry_frame, GLOBALS()); - if (lltrace < 0) { - goto exit_unwind; - } -#endif - -#ifdef Py_DEBUG - /* _PyEval_EvalFrameDefault() must not be called with an exception set, - because it can clear it (directly or indirectly) and so the - caller loses its exception */ - assert(!_PyErr_Occurred(tstate)); -#endif - - DISPATCH(); - - { - /* Start instructions */ -#if !USE_COMPUTED_GOTOS - dispatch_opcode: - switch (opcode) -#endif - { + goto start_frame; #include "generated_cases.c.h" -#if USE_COMPUTED_GOTOS - _unknown_opcode: -#else - EXTRA_CASES // From pycore_opcode_metadata.h, a 'case' for each unused opcode -#endif - /* Tell C compilers not to hold the opcode variable in the loop. - next_instr points the current instruction without TARGET(). */ - opcode = next_instr->op.code; - _PyErr_Format(tstate, PyExc_SystemError, - "%U:%d: unknown opcode %d", - _PyFrame_GetCode(frame)->co_filename, - PyUnstable_InterpreterFrame_GetLine(frame), - opcode); - goto error; - - } /* End instructions */ - - /* This should never be reached. Every opcode should end with DISPATCH() - or goto error. 
*/ - Py_UNREACHABLE(); - -pop_4_error: - STACK_SHRINK(1); -pop_3_error: - STACK_SHRINK(1); -pop_2_error: - STACK_SHRINK(1); -pop_1_error: - STACK_SHRINK(1); -error: - /* Double-check exception status. */ -#ifdef NDEBUG - if (!_PyErr_Occurred(tstate)) { - _PyErr_SetString(tstate, PyExc_SystemError, - "error return without exception set"); - } -#else - assert(_PyErr_Occurred(tstate)); -#endif - - /* Log traceback info. */ - assert(frame != &entry_frame); - if (!_PyFrame_IsIncomplete(frame)) { - PyFrameObject *f = _PyFrame_GetFrameObject(frame); - if (f != NULL) { - PyTraceBack_Here(f); - } - } - _PyEval_MonitorRaise(tstate, frame, next_instr-1); -exception_unwind: - { - /* We can't use frame->instr_ptr here, as RERAISE may have set it */ - int offset = INSTR_OFFSET()-1; - int level, handler, lasti; - if (get_exception_handler(_PyFrame_GetCode(frame), offset, &level, &handler, &lasti) == 0) { - // No handlers, so exit. - assert(_PyErr_Occurred(tstate)); - - /* Pop remaining stack entries. */ - _PyStackRef *stackbase = _PyFrame_Stackbase(frame); - while (stack_pointer > stackbase) { - PyStackRef_XCLOSE(POP()); - } - assert(STACK_LEVEL() == 0); - _PyFrame_SetStackPointer(frame, stack_pointer); - monitor_unwind(tstate, frame, next_instr-1); - goto exit_unwind; - } - - assert(STACK_LEVEL() >= level); - _PyStackRef *new_top = _PyFrame_Stackbase(frame) + level; - while (stack_pointer > new_top) { - PyStackRef_XCLOSE(POP()); - } - if (lasti) { - int frame_lasti = _PyInterpreterFrame_LASTI(frame); - PyObject *lasti = PyLong_FromLong(frame_lasti); - if (lasti == NULL) { - goto exception_unwind; - } - PUSH(PyStackRef_FromPyObjectSteal(lasti)); - } - - /* Make the raw exception data - available to the handler, - so a program can emulate the - Python main loop. */ - PyObject *exc = _PyErr_GetRaisedException(tstate); - PUSH(PyStackRef_FromPyObjectSteal(exc)); - next_instr = _PyFrame_GetBytecode(frame) + handler; - - if (monitor_handled(tstate, frame, next_instr, exc) < 0) { - goto exception_unwind; - } - /* Resume normal execution */ -#ifdef LLTRACE - if (lltrace >= 5) { - lltrace_resume_frame(frame); - } -#endif - DISPATCH(); - } - } - -exit_unwind: - assert(_PyErr_Occurred(tstate)); - _Py_LeaveRecursiveCallPy(tstate); - assert(frame != &entry_frame); - // GH-99729: We need to unlink the frame *before* clearing it: - _PyInterpreterFrame *dying = frame; - frame = tstate->current_frame = dying->previous; - _PyEval_FrameClearAndPop(tstate, dying); - frame->return_offset = 0; - if (frame == &entry_frame) { - /* Restore previous frame and exit */ - tstate->current_frame = frame->previous; - tstate->c_recursion_remaining += PY_EVAL_C_STACK_UNITS; - return NULL; - } - -resume_with_error: - next_instr = frame->instr_ptr; - stack_pointer = _PyFrame_GetStackPointer(frame); - goto error; - - #ifdef _Py_TIER2 // Tier 2 is also here! @@ -1058,13 +886,6 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int #undef ENABLE_SPECIALIZATION_FT #define ENABLE_SPECIALIZATION_FT 0 -#ifdef Py_DEBUG - #define DPRINTF(level, ...) \ - if (lltrace >= (level)) { printf(__VA_ARGS__); } -#else - #define DPRINTF(level, ...) 
-#endif - ; // dummy statement after a label, before a declaration uint16_t uopcode; #ifdef Py_STATS @@ -1077,7 +898,7 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int for (;;) { uopcode = next_uop->opcode; #ifdef Py_DEBUG - if (lltrace >= 3) { + if (frame->lltrace >= 3) { dump_stack(frame, stack_pointer); if (next_uop->opcode == _START_EXECUTOR) { printf("%4d uop: ", 0); @@ -1119,7 +940,7 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int jump_to_error_target: #ifdef Py_DEBUG - if (lltrace >= 2) { + if (frame->lltrace >= 2) { printf("Error: [UOp "); _PyUOpPrint(&next_uop[-1]); printf(" @ %d -> %s]\n", @@ -1136,10 +957,10 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int OPT_HIST(trace_uop_execution_counter, trace_run_length_hist); assert(next_uop[-1].format == UOP_FORMAT_TARGET); frame->return_offset = 0; // Don't leave this random - _PyFrame_SetStackPointer(frame, stack_pointer); Py_DECREF(current_executor); tstate->previous_executor = NULL; - goto resume_with_error; + next_instr = frame->instr_ptr; + goto error; jump_to_jump_target: assert(next_uop[-1].format == UOP_FORMAT_JUMP); @@ -1155,7 +976,7 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int next_instr = next_uop[-1].target + _PyFrame_GetBytecode(frame); goto_to_tier1: #ifdef Py_DEBUG - if (lltrace >= 2) { + if (frame->lltrace >= 2) { printf("DEOPT: [UOp "); _PyUOpPrint(&next_uop[-1]); printf(" -> %s]\n", @@ -1171,12 +992,22 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int #endif // _Py_TIER2 +early_exit: + assert(_PyErr_Occurred(tstate)); + _Py_LeaveRecursiveCallPy(tstate); + assert(frame->owner != FRAME_OWNED_BY_INTERPRETER); + // GH-99729: We need to unlink the frame *before* clearing it: + _PyInterpreterFrame *dying = frame; + frame = tstate->current_frame = dying->previous; + _PyEval_FrameClearAndPop(tstate, dying); + frame->return_offset = 0; + assert(frame->owner == FRAME_OWNED_BY_INTERPRETER); + /* Restore previous frame and exit */ + tstate->current_frame = frame->previous; + tstate->c_recursion_remaining += PY_EVAL_C_STACK_UNITS; + return NULL; } -#ifdef DO_NOT_OPTIMIZE_INTERP_LOOP -# pragma optimize("", on) -#endif - #if defined(__GNUC__) # pragma GCC diagnostic pop #elif defined(_MSC_VER) /* MS_WINDOWS */ @@ -1522,7 +1353,12 @@ initialize_locals(PyThreadState *tstate, PyFunctionObject *func, u = (PyObject *)&_Py_SINGLETON(tuple_empty); } else { - u = _PyTuple_FromStackRefSteal(args + n, argcount - n); + u = _PyTuple_FromStackRefStealOnSuccess(args + n, argcount - n); + if (u == NULL) { + for (Py_ssize_t i = n; i < argcount; i++) { + PyStackRef_CLOSE(args[i]); + } + } } if (u == NULL) { goto fail_post_positional; @@ -2092,8 +1928,8 @@ do_raise(PyThreadState *tstate, PyObject *exc, PyObject *cause) */ int -_PyEval_ExceptionGroupMatch(PyObject* exc_value, PyObject *match_type, - PyObject **match, PyObject **rest) +_PyEval_ExceptionGroupMatch(_PyInterpreterFrame *frame, PyObject* exc_value, + PyObject *match_type, PyObject **match, PyObject **rest) { if (Py_IsNone(exc_value)) { *match = Py_NewRef(Py_None); @@ -2119,6 +1955,15 @@ _PyEval_ExceptionGroupMatch(PyObject* exc_value, PyObject *match_type, if (wrapped == NULL) { return -1; } + PyFrameObject *f = _PyFrame_GetFrameObject(frame); + if (f != NULL) { + PyObject *tb = _PyTraceBack_FromFrame(NULL, f); + if (tb == NULL) { + return -1; + } + PyException_SetTraceback(wrapped, tb); + Py_DECREF(tb); + } *match = 
wrapped; } *rest = Py_NewRef(Py_None); diff --git a/Python/ceval_gil.c b/Python/ceval_gil.c index 1f811e72406130..416eec01052224 100644 --- a/Python/ceval_gil.c +++ b/Python/ceval_gil.c @@ -995,7 +995,7 @@ _Py_unset_eval_breaker_bit_all(PyInterpreterState *interp, uintptr_t bit) void _Py_FinishPendingCalls(PyThreadState *tstate) { - assert(PyGILState_Check()); + _Py_AssertHoldsTstate(); assert(_PyThreadState_CheckConsistency(tstate)); struct _pending_calls *pending = &tstate->interp->ceval.pending; @@ -1056,7 +1056,7 @@ _PyEval_MakePendingCalls(PyThreadState *tstate) int Py_MakePendingCalls(void) { - assert(PyGILState_Check()); + _Py_AssertHoldsTstate(); PyThreadState *tstate = _PyThreadState_GET(); assert(_PyThreadState_CheckConsistency(tstate)); diff --git a/Python/ceval_macros.h b/Python/ceval_macros.h index f15633fa467376..c2fc38f3c18e53 100644 --- a/Python/ceval_macros.h +++ b/Python/ceval_macros.h @@ -80,7 +80,7 @@ /* PRE_DISPATCH_GOTO() does lltrace if enabled. Normally a no-op */ #ifdef LLTRACE -#define PRE_DISPATCH_GOTO() if (lltrace >= 5) { \ +#define PRE_DISPATCH_GOTO() if (frame->lltrace >= 5) { \ lltrace_instruction(frame, stack_pointer, next_instr, opcode, oparg); } #else #define PRE_DISPATCH_GOTO() ((void)0) @@ -89,7 +89,8 @@ #if LLTRACE #define LLTRACE_RESUME_FRAME() \ do { \ - lltrace = maybe_lltrace_resume_frame(frame, &entry_frame, GLOBALS()); \ + int lltrace = maybe_lltrace_resume_frame(frame, GLOBALS()); \ + frame->lltrace = lltrace; \ if (lltrace < 0) { \ goto exit_unwind; \ } \ @@ -165,35 +166,6 @@ GETITEM(PyObject *v, Py_ssize_t i) { #define JUMPBY(x) (next_instr += (x)) #define SKIP_OVER(x) (next_instr += (x)) -/* OpCode prediction macros - Some opcodes tend to come in pairs thus making it possible to - predict the second code when the first is run. For example, - COMPARE_OP is often followed by POP_JUMP_IF_FALSE or POP_JUMP_IF_TRUE. - - Verifying the prediction costs a single high-speed test of a register - variable against a constant. If the pairing was good, then the - processor's own internal branch predication has a high likelihood of - success, resulting in a nearly zero-overhead transition to the - next opcode. A successful prediction saves a trip through the eval-loop - including its unpredictable switch-case branch. Combined with the - processor's internal branch prediction, a successful PREDICT has the - effect of making the two opcodes run as if they were a single new opcode - with the bodies combined. - - If collecting opcode statistics, your choices are to either keep the - predictions turned-on and interpret the results as if some opcodes - had been combined or turn-off predictions so that the opcode frequency - counter updates for both opcodes. - - Opcode prediction is disabled with threaded code, since the latter allows - the CPU to record separate branch prediction information for each - opcode. 
- -*/ - -#define PREDICT_ID(op) PRED_##op -#define PREDICTED(op) PREDICT_ID(op): - /* Stack manipulation macros */ @@ -238,7 +210,7 @@ GETITEM(PyObject *v, Py_ssize_t i) { #endif #define WITHIN_STACK_BOUNDS() \ - (frame == &entry_frame || (STACK_LEVEL() >= 0 && STACK_LEVEL() <= STACK_SIZE())) + (frame->owner == FRAME_OWNED_BY_INTERPRETER || (STACK_LEVEL() >= 0 && STACK_LEVEL() <= STACK_SIZE())) /* Data access macros */ #define FRAME_CO_CONSTS (_PyFrame_GetCode(frame)->co_consts) @@ -259,8 +231,6 @@ GETITEM(PyObject *v, Py_ssize_t i) { GETLOCAL(i) = value; \ PyStackRef_XCLOSE(tmp); } while (0) -#define GO_TO_INSTRUCTION(op) goto PREDICT_ID(op) - #ifdef Py_STATS #define UPDATE_MISS_STATS(INSTNAME) \ do { \ @@ -280,7 +250,7 @@ GETITEM(PyObject *v, Py_ssize_t i) { /* This is only a single jump on release builds! */ \ UPDATE_MISS_STATS((INSTNAME)); \ assert(_PyOpcode_Deopt[opcode] == (INSTNAME)); \ - GO_TO_INSTRUCTION(INSTNAME); \ + goto PREDICTED_##INSTNAME; \ } @@ -363,7 +333,7 @@ do { \ next_instr = dest; \ } else { \ _PyFrame_SetStackPointer(frame, stack_pointer); \ - next_instr = _Py_call_instrumentation_jump(tstate, event, frame, src, dest); \ + next_instr = _Py_call_instrumentation_jump(this_instr, tstate, event, frame, src, dest); \ stack_pointer = _PyFrame_GetStackPointer(frame); \ if (next_instr == NULL) { \ next_instr = (dest)+1; \ @@ -411,7 +381,9 @@ do { \ tstate->previous_executor = NULL; \ frame = tstate->current_frame; \ if (next_instr == NULL) { \ - goto resume_with_error; \ + next_instr = frame->instr_ptr; \ + stack_pointer = _PyFrame_GetStackPointer(frame); \ + goto error; \ } \ stack_pointer = _PyFrame_GetStackPointer(frame); \ DISPATCH(); \ diff --git a/Python/clinic/_warnings.c.h b/Python/clinic/_warnings.c.h index 9a2c33f2ea8169..bcb4b344fa4370 100644 --- a/Python/clinic/_warnings.c.h +++ b/Python/clinic/_warnings.c.h @@ -9,6 +9,40 @@ preserve #include "pycore_abstract.h" // _PyNumber_Index() #include "pycore_modsupport.h" // _PyArg_UnpackKeywords() +PyDoc_STRVAR(warnings_acquire_lock__doc__, +"_acquire_lock($module, /)\n" +"--\n" +"\n"); + +#define WARNINGS_ACQUIRE_LOCK_METHODDEF \ + {"_acquire_lock", (PyCFunction)warnings_acquire_lock, METH_NOARGS, warnings_acquire_lock__doc__}, + +static PyObject * +warnings_acquire_lock_impl(PyObject *module); + +static PyObject * +warnings_acquire_lock(PyObject *module, PyObject *Py_UNUSED(ignored)) +{ + return warnings_acquire_lock_impl(module); +} + +PyDoc_STRVAR(warnings_release_lock__doc__, +"_release_lock($module, /)\n" +"--\n" +"\n"); + +#define WARNINGS_RELEASE_LOCK_METHODDEF \ + {"_release_lock", (PyCFunction)warnings_release_lock, METH_NOARGS, warnings_release_lock__doc__}, + +static PyObject * +warnings_release_lock_impl(PyObject *module); + +static PyObject * +warnings_release_lock(PyObject *module, PyObject *Py_UNUSED(ignored)) +{ + return warnings_release_lock_impl(module); +} + PyDoc_STRVAR(warnings_warn__doc__, "warn($module, /, message, category=None, stacklevel=1, source=None, *,\n" " skip_file_prefixes=)\n" @@ -230,20 +264,20 @@ warnings_warn_explicit(PyObject *module, PyObject *const *args, Py_ssize_t nargs return return_value; } -PyDoc_STRVAR(warnings_filters_mutated__doc__, -"_filters_mutated($module, /)\n" +PyDoc_STRVAR(warnings_filters_mutated_lock_held__doc__, +"_filters_mutated_lock_held($module, /)\n" "--\n" "\n"); -#define WARNINGS_FILTERS_MUTATED_METHODDEF \ - {"_filters_mutated", (PyCFunction)warnings_filters_mutated, METH_NOARGS, warnings_filters_mutated__doc__}, +#define 
WARNINGS_FILTERS_MUTATED_LOCK_HELD_METHODDEF \ + {"_filters_mutated_lock_held", (PyCFunction)warnings_filters_mutated_lock_held, METH_NOARGS, warnings_filters_mutated_lock_held__doc__}, static PyObject * -warnings_filters_mutated_impl(PyObject *module); +warnings_filters_mutated_lock_held_impl(PyObject *module); static PyObject * -warnings_filters_mutated(PyObject *module, PyObject *Py_UNUSED(ignored)) +warnings_filters_mutated_lock_held(PyObject *module, PyObject *Py_UNUSED(ignored)) { - return warnings_filters_mutated_impl(module); + return warnings_filters_mutated_lock_held_impl(module); } -/*[clinic end generated code: output=ed02c0f521a03a37 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=d9d32a8b59a30683 input=a9049054013a1b77]*/ diff --git a/Python/clinic/context.c.h b/Python/clinic/context.c.h index 997ac6f63384a9..71f05aa02a51e7 100644 --- a/Python/clinic/context.c.h +++ b/Python/clinic/context.c.h @@ -21,7 +21,7 @@ _contextvars_Context_get_impl(PyContext *self, PyObject *key, PyObject *default_value); static PyObject * -_contextvars_Context_get(PyContext *self, PyObject *const *args, Py_ssize_t nargs) +_contextvars_Context_get(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *key; @@ -36,7 +36,7 @@ _contextvars_Context_get(PyContext *self, PyObject *const *args, Py_ssize_t narg } default_value = args[1]; skip_optional: - return_value = _contextvars_Context_get_impl(self, key, default_value); + return_value = _contextvars_Context_get_impl((PyContext *)self, key, default_value); exit: return return_value; @@ -57,9 +57,9 @@ static PyObject * _contextvars_Context_items_impl(PyContext *self); static PyObject * -_contextvars_Context_items(PyContext *self, PyObject *Py_UNUSED(ignored)) +_contextvars_Context_items(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _contextvars_Context_items_impl(self); + return _contextvars_Context_items_impl((PyContext *)self); } PyDoc_STRVAR(_contextvars_Context_keys__doc__, @@ -75,9 +75,9 @@ static PyObject * _contextvars_Context_keys_impl(PyContext *self); static PyObject * -_contextvars_Context_keys(PyContext *self, PyObject *Py_UNUSED(ignored)) +_contextvars_Context_keys(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _contextvars_Context_keys_impl(self); + return _contextvars_Context_keys_impl((PyContext *)self); } PyDoc_STRVAR(_contextvars_Context_values__doc__, @@ -93,9 +93,9 @@ static PyObject * _contextvars_Context_values_impl(PyContext *self); static PyObject * -_contextvars_Context_values(PyContext *self, PyObject *Py_UNUSED(ignored)) +_contextvars_Context_values(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _contextvars_Context_values_impl(self); + return _contextvars_Context_values_impl((PyContext *)self); } PyDoc_STRVAR(_contextvars_Context_copy__doc__, @@ -111,9 +111,9 @@ static PyObject * _contextvars_Context_copy_impl(PyContext *self); static PyObject * -_contextvars_Context_copy(PyContext *self, PyObject *Py_UNUSED(ignored)) +_contextvars_Context_copy(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _contextvars_Context_copy_impl(self); + return _contextvars_Context_copy_impl((PyContext *)self); } PyDoc_STRVAR(_contextvars_ContextVar_get__doc__, @@ -135,7 +135,7 @@ static PyObject * _contextvars_ContextVar_get_impl(PyContextVar *self, PyObject *default_value); static PyObject * -_contextvars_ContextVar_get(PyContextVar *self, PyObject *const *args, Py_ssize_t nargs) +_contextvars_ContextVar_get(PyObject *self, PyObject *const *args, 
Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *default_value = NULL; @@ -148,7 +148,7 @@ _contextvars_ContextVar_get(PyContextVar *self, PyObject *const *args, Py_ssize_ } default_value = args[0]; skip_optional: - return_value = _contextvars_ContextVar_get_impl(self, default_value); + return_value = _contextvars_ContextVar_get_impl((PyContextVar *)self, default_value); exit: return return_value; @@ -179,4 +179,4 @@ PyDoc_STRVAR(_contextvars_ContextVar_reset__doc__, #define _CONTEXTVARS_CONTEXTVAR_RESET_METHODDEF \ {"reset", (PyCFunction)_contextvars_ContextVar_reset, METH_O, _contextvars_ContextVar_reset__doc__}, -/*[clinic end generated code: output=b667826178444c3f input=a9049054013a1b77]*/ +/*[clinic end generated code: output=444567eaf0df25e0 input=a9049054013a1b77]*/ diff --git a/Python/clinic/instruction_sequence.c.h b/Python/clinic/instruction_sequence.c.h index 45693e5856f8a7..41ab2de44e426e 100644 --- a/Python/clinic/instruction_sequence.c.h +++ b/Python/clinic/instruction_sequence.c.h @@ -51,7 +51,7 @@ InstructionSequenceType_use_label_impl(_PyInstructionSequence *self, int label); static PyObject * -InstructionSequenceType_use_label(_PyInstructionSequence *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +InstructionSequenceType_use_label(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -91,7 +91,7 @@ InstructionSequenceType_use_label(_PyInstructionSequence *self, PyObject *const if (label == -1 && PyErr_Occurred()) { goto exit; } - return_value = InstructionSequenceType_use_label_impl(self, label); + return_value = InstructionSequenceType_use_label_impl((_PyInstructionSequence *)self, label); exit: return return_value; @@ -113,7 +113,7 @@ InstructionSequenceType_addop_impl(_PyInstructionSequence *self, int opcode, int end_lineno, int end_col_offset); static PyObject * -InstructionSequenceType_addop(_PyInstructionSequence *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +InstructionSequenceType_addop(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -178,7 +178,7 @@ InstructionSequenceType_addop(_PyInstructionSequence *self, PyObject *const *arg if (end_col_offset == -1 && PyErr_Occurred()) { goto exit; } - return_value = InstructionSequenceType_addop_impl(self, opcode, oparg, lineno, col_offset, end_lineno, end_col_offset); + return_value = InstructionSequenceType_addop_impl((_PyInstructionSequence *)self, opcode, oparg, lineno, col_offset, end_lineno, end_col_offset); exit: return return_value; @@ -197,12 +197,12 @@ static int InstructionSequenceType_new_label_impl(_PyInstructionSequence *self); static PyObject * -InstructionSequenceType_new_label(_PyInstructionSequence *self, PyObject *Py_UNUSED(ignored)) +InstructionSequenceType_new_label(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; int _return_value; - _return_value = InstructionSequenceType_new_label_impl(self); + _return_value = InstructionSequenceType_new_label_impl((_PyInstructionSequence *)self); if ((_return_value == -1) && PyErr_Occurred()) { goto exit; } @@ -226,7 +226,7 @@ InstructionSequenceType_add_nested_impl(_PyInstructionSequence *self, PyObject *nested); static PyObject * -InstructionSequenceType_add_nested(_PyInstructionSequence *self, PyObject *const *args, Py_ssize_t nargs, PyObject 
*kwnames) +InstructionSequenceType_add_nested(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -263,7 +263,7 @@ InstructionSequenceType_add_nested(_PyInstructionSequence *self, PyObject *const goto exit; } nested = args[0]; - return_value = InstructionSequenceType_add_nested_impl(self, nested); + return_value = InstructionSequenceType_add_nested_impl((_PyInstructionSequence *)self, nested); exit: return return_value; @@ -282,9 +282,9 @@ static PyObject * InstructionSequenceType_get_nested_impl(_PyInstructionSequence *self); static PyObject * -InstructionSequenceType_get_nested(_PyInstructionSequence *self, PyObject *Py_UNUSED(ignored)) +InstructionSequenceType_get_nested(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return InstructionSequenceType_get_nested_impl(self); + return InstructionSequenceType_get_nested_impl((_PyInstructionSequence *)self); } PyDoc_STRVAR(InstructionSequenceType_get_instructions__doc__, @@ -300,8 +300,8 @@ static PyObject * InstructionSequenceType_get_instructions_impl(_PyInstructionSequence *self); static PyObject * -InstructionSequenceType_get_instructions(_PyInstructionSequence *self, PyObject *Py_UNUSED(ignored)) +InstructionSequenceType_get_instructions(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return InstructionSequenceType_get_instructions_impl(self); + return InstructionSequenceType_get_instructions_impl((_PyInstructionSequence *)self); } -/*[clinic end generated code: output=35163e5b589b4446 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=e6b5d05bde008cc2 input=a9049054013a1b77]*/ diff --git a/Python/clinic/sysmodule.c.h b/Python/clinic/sysmodule.c.h index cfcbd55388efa0..1e53624d4d45d7 100644 --- a/Python/clinic/sysmodule.c.h +++ b/Python/clinic/sysmodule.c.h @@ -373,6 +373,36 @@ sys__is_interned(PyObject *module, PyObject *arg) return return_value; } +PyDoc_STRVAR(sys__is_immortal__doc__, +"_is_immortal($module, op, /)\n" +"--\n" +"\n" +"Return True if the given object is \"immortal\" per PEP 683.\n" +"\n" +"This function should be used for specialized purposes only."); + +#define SYS__IS_IMMORTAL_METHODDEF \ + {"_is_immortal", (PyCFunction)sys__is_immortal, METH_O, sys__is_immortal__doc__}, + +static int +sys__is_immortal_impl(PyObject *module, PyObject *op); + +static PyObject * +sys__is_immortal(PyObject *module, PyObject *op) +{ + PyObject *return_value = NULL; + int _return_value; + + _return_value = sys__is_immortal_impl(module, op); + if ((_return_value == -1) && PyErr_Occurred()) { + goto exit; + } + return_value = PyBool_FromLong((long)_return_value); + +exit: + return return_value; +} + PyDoc_STRVAR(sys_settrace__doc__, "settrace($module, function, /)\n" "--\n" @@ -1724,4 +1754,4 @@ sys__is_gil_enabled(PyObject *module, PyObject *Py_UNUSED(ignored)) #ifndef SYS_GETANDROIDAPILEVEL_METHODDEF #define SYS_GETANDROIDAPILEVEL_METHODDEF #endif /* !defined(SYS_GETANDROIDAPILEVEL_METHODDEF) */ -/*[clinic end generated code: output=568b0a0069dc43e8 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=1e5f608092c12636 input=a9049054013a1b77]*/ diff --git a/Python/codecs.c b/Python/codecs.c index 2cb3875db35058..53680a79082634 100644 --- a/Python/codecs.c +++ b/Python/codecs.c @@ -659,91 +659,109 @@ PyObject *PyCodec_LookupError(const char *name) return handler; } -static void wrong_exception_type(PyObject *exc) + +static inline void +wrong_exception_type(PyObject *exc) { 
PyErr_Format(PyExc_TypeError, - "don't know how to handle %.200s in error callback", - Py_TYPE(exc)->tp_name); + "don't know how to handle %T in error callback", exc); } + +#define _PyIsUnicodeEncodeError(EXC) \ + PyObject_TypeCheck(EXC, (PyTypeObject *)PyExc_UnicodeEncodeError) +#define _PyIsUnicodeDecodeError(EXC) \ + PyObject_TypeCheck(EXC, (PyTypeObject *)PyExc_UnicodeDecodeError) +#define _PyIsUnicodeTranslateError(EXC) \ + PyObject_TypeCheck(EXC, (PyTypeObject *)PyExc_UnicodeTranslateError) + + +// --- handler: 'strict' ------------------------------------------------------ + PyObject *PyCodec_StrictErrors(PyObject *exc) { - if (PyExceptionInstance_Check(exc)) + if (PyExceptionInstance_Check(exc)) { PyErr_SetObject(PyExceptionInstance_Class(exc), exc); - else + } + else { PyErr_SetString(PyExc_TypeError, "codec must pass exception instance"); + } return NULL; } -PyObject *PyCodec_IgnoreErrors(PyObject *exc) +// --- handler: 'ignore' ------------------------------------------------------ + +static PyObject * +_PyCodec_IgnoreError(PyObject *exc, int as_bytes) { Py_ssize_t end; - - if (PyObject_TypeCheck(exc, (PyTypeObject *)PyExc_UnicodeEncodeError)) { - if (PyUnicodeEncodeError_GetEnd(exc, &end)) - return NULL; + if (_PyUnicodeError_GetParams(exc, NULL, NULL, NULL, + &end, NULL, as_bytes) < 0) + { + return NULL; } - else if (PyObject_TypeCheck(exc, (PyTypeObject *)PyExc_UnicodeDecodeError)) { - if (PyUnicodeDecodeError_GetEnd(exc, &end)) - return NULL; + return Py_BuildValue("(Nn)", Py_GetConstant(Py_CONSTANT_EMPTY_STR), end); +} + + +PyObject *PyCodec_IgnoreErrors(PyObject *exc) +{ + if (_PyIsUnicodeEncodeError(exc) || _PyIsUnicodeTranslateError(exc)) { + return _PyCodec_IgnoreError(exc, false); } - else if (PyObject_TypeCheck(exc, (PyTypeObject *)PyExc_UnicodeTranslateError)) { - if (PyUnicodeTranslateError_GetEnd(exc, &end)) - return NULL; + else if (_PyIsUnicodeDecodeError(exc)) { + return _PyCodec_IgnoreError(exc, true); } else { wrong_exception_type(exc); return NULL; } - return Py_BuildValue("(Nn)", Py_GetConstant(Py_CONSTANT_EMPTY_STR), end); } PyObject *PyCodec_ReplaceErrors(PyObject *exc) { - Py_ssize_t start, end, i, len; + Py_ssize_t start, end, slen; if (PyObject_TypeCheck(exc, (PyTypeObject *)PyExc_UnicodeEncodeError)) { - PyObject *res; - Py_UCS1 *outp; - if (PyUnicodeEncodeError_GetStart(exc, &start)) + if (_PyUnicodeError_GetParams(exc, NULL, NULL, + &start, &end, &slen, false) < 0) { return NULL; - if (PyUnicodeEncodeError_GetEnd(exc, &end)) - return NULL; - len = end - start; - res = PyUnicode_New(len, '?'); - if (res == NULL) + } + PyObject *res = PyUnicode_New(slen, '?'); + if (res == NULL) { return NULL; + } assert(PyUnicode_KIND(res) == PyUnicode_1BYTE_KIND); - outp = PyUnicode_1BYTE_DATA(res); - for (i = 0; i < len; ++i) - outp[i] = '?'; + Py_UCS1 *outp = PyUnicode_1BYTE_DATA(res); + memset(outp, '?', sizeof(Py_UCS1) * slen); assert(_PyUnicode_CheckConsistency(res, 1)); return Py_BuildValue("(Nn)", res, end); } else if (PyObject_TypeCheck(exc, (PyTypeObject *)PyExc_UnicodeDecodeError)) { - if (PyUnicodeDecodeError_GetEnd(exc, &end)) + if (_PyUnicodeError_GetParams(exc, NULL, NULL, + NULL, &end, NULL, true) < 0) { return NULL; + } return Py_BuildValue("(Cn)", (int)Py_UNICODE_REPLACEMENT_CHARACTER, end); } else if (PyObject_TypeCheck(exc, (PyTypeObject *)PyExc_UnicodeTranslateError)) { - PyObject *res; - Py_UCS2 *outp; - if (PyUnicodeTranslateError_GetStart(exc, &start)) - return NULL; - if (PyUnicodeTranslateError_GetEnd(exc, &end)) + if 
(_PyUnicodeError_GetParams(exc, NULL, NULL, + &start, &end, &slen, false) < 0) { return NULL; - len = end - start; - res = PyUnicode_New(len, Py_UNICODE_REPLACEMENT_CHARACTER); - if (res == NULL) + } + PyObject *res = PyUnicode_New(slen, Py_UNICODE_REPLACEMENT_CHARACTER); + if (res == NULL) { return NULL; - assert(PyUnicode_KIND(res) == PyUnicode_2BYTE_KIND); - outp = PyUnicode_2BYTE_DATA(res); - for (i = 0; i < len; i++) + } + assert(slen == 0 || PyUnicode_KIND(res) == PyUnicode_2BYTE_KIND); + Py_UCS2 *outp = PyUnicode_2BYTE_DATA(res); + for (Py_ssize_t i = 0; i < slen; ++i) { outp[i] = Py_UNICODE_REPLACEMENT_CHARACTER; + } assert(_PyUnicode_CheckConsistency(res, 1)); return Py_BuildValue("(Nn)", res, end); } @@ -755,206 +773,218 @@ PyObject *PyCodec_ReplaceErrors(PyObject *exc) PyObject *PyCodec_XMLCharRefReplaceErrors(PyObject *exc) { - if (PyObject_TypeCheck(exc, (PyTypeObject *)PyExc_UnicodeEncodeError)) { - PyObject *restuple; - PyObject *object; - Py_ssize_t i; - Py_ssize_t start; - Py_ssize_t end; - PyObject *res; - Py_UCS1 *outp; - Py_ssize_t ressize; - Py_UCS4 ch; - if (PyUnicodeEncodeError_GetStart(exc, &start)) - return NULL; - if (PyUnicodeEncodeError_GetEnd(exc, &end)) - return NULL; - if (!(object = PyUnicodeEncodeError_GetObject(exc))) - return NULL; - if (end - start > PY_SSIZE_T_MAX / (2+7+1)) - end = start + PY_SSIZE_T_MAX / (2+7+1); - for (i = start, ressize = 0; i < end; ++i) { - /* object is guaranteed to be "ready" */ - ch = PyUnicode_READ_CHAR(object, i); - if (ch<10) - ressize += 2+1+1; - else if (ch<100) - ressize += 2+2+1; - else if (ch<1000) - ressize += 2+3+1; - else if (ch<10000) - ressize += 2+4+1; - else if (ch<100000) - ressize += 2+5+1; - else if (ch<1000000) - ressize += 2+6+1; - else - ressize += 2+7+1; + if (!PyObject_TypeCheck(exc, (PyTypeObject *)PyExc_UnicodeEncodeError)) { + wrong_exception_type(exc); + return NULL; + } + + PyObject *obj; + Py_ssize_t objlen, start, end, slen; + if (_PyUnicodeError_GetParams(exc, + &obj, &objlen, + &start, &end, &slen, false) < 0) + { + return NULL; + } + + // The number of characters that each character 'ch' contributes + // in the result is 2 + k + 1, where k = min{t >= 1 | 10^t > ch} + // and will be formatted as "&#" + DIGITS + ";". Since the Unicode + // range is below 10^7, each "block" requires at most 2 + 7 + 1 + // characters. 
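/* [Editor's note -- sketch of the sizing rule described in the comment above;
 * the helper name is invented.]  One "&#NNN;" block costs 2 + digits + 1
 * bytes, and the worst case is 2 + 7 + 1 because code points stay below 10**7:
 */
static inline Py_ssize_t
sketch_xmlcharref_width(Py_UCS4 ch)
{
    int digits = 1;
    for (Py_UCS4 limit = 10; ch >= limit && digits < 7; limit *= 10) {
        digits++;
    }
    return 2 + digits + 1;   /* "&#" + decimal digits + ";" */
}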
+ if (slen > PY_SSIZE_T_MAX / (2 + 7 + 1)) { + end = start + PY_SSIZE_T_MAX / (2 + 7 + 1); + end = Py_MIN(end, objlen); + slen = Py_MAX(0, end - start); + } + + Py_ssize_t ressize = 0; + for (Py_ssize_t i = start; i < end; ++i) { + /* object is guaranteed to be "ready" */ + Py_UCS4 ch = PyUnicode_READ_CHAR(obj, i); + if (ch < 10) { + ressize += 2 + 1 + 1; } - /* allocate replacement */ - res = PyUnicode_New(ressize, 127); - if (res == NULL) { - Py_DECREF(object); - return NULL; + else if (ch < 100) { + ressize += 2 + 2 + 1; } - outp = PyUnicode_1BYTE_DATA(res); - /* generate replacement */ - for (i = start; i < end; ++i) { - int digits; - int base; - ch = PyUnicode_READ_CHAR(object, i); - *outp++ = '&'; - *outp++ = '#'; - if (ch<10) { - digits = 1; - base = 1; - } - else if (ch<100) { - digits = 2; - base = 10; - } - else if (ch<1000) { - digits = 3; - base = 100; - } - else if (ch<10000) { - digits = 4; - base = 1000; - } - else if (ch<100000) { - digits = 5; - base = 10000; - } - else if (ch<1000000) { - digits = 6; - base = 100000; - } - else { - digits = 7; - base = 1000000; - } - while (digits-->0) { - *outp++ = '0' + ch/base; - ch %= base; - base /= 10; - } - *outp++ = ';'; + else if (ch < 1000) { + ressize += 2 + 3 + 1; + } + else if (ch < 10000) { + ressize += 2 + 4 + 1; + } + else if (ch < 100000) { + ressize += 2 + 5 + 1; + } + else if (ch < 1000000) { + ressize += 2 + 6 + 1; + } + else { + assert(ch < 10000000); + ressize += 2 + 7 + 1; } - assert(_PyUnicode_CheckConsistency(res, 1)); - restuple = Py_BuildValue("(Nn)", res, end); - Py_DECREF(object); - return restuple; } - else { - wrong_exception_type(exc); + + /* allocate replacement */ + PyObject *res = PyUnicode_New(ressize, 127); + if (res == NULL) { + Py_DECREF(obj); return NULL; } + Py_UCS1 *outp = PyUnicode_1BYTE_DATA(res); + /* generate replacement */ + for (Py_ssize_t i = start; i < end; ++i) { + int digits, base; + Py_UCS4 ch = PyUnicode_READ_CHAR(obj, i); + if (ch < 10) { + digits = 1; + base = 1; + } + else if (ch < 100) { + digits = 2; + base = 10; + } + else if (ch < 1000) { + digits = 3; + base = 100; + } + else if (ch < 10000) { + digits = 4; + base = 1000; + } + else if (ch < 100000) { + digits = 5; + base = 10000; + } + else if (ch < 1000000) { + digits = 6; + base = 100000; + } + else { + assert(ch < 10000000); + digits = 7; + base = 1000000; + } + *outp++ = '&'; + *outp++ = '#'; + while (digits-- > 0) { + assert(base >= 1); + *outp++ = '0' + ch / base; + ch %= base; + base /= 10; + } + *outp++ = ';'; + } + assert(_PyUnicode_CheckConsistency(res, 1)); + PyObject *restuple = Py_BuildValue("(Nn)", res, end); + Py_DECREF(obj); + return restuple; } PyObject *PyCodec_BackslashReplaceErrors(PyObject *exc) { - PyObject *object; - Py_ssize_t i; - Py_ssize_t start; - Py_ssize_t end; - PyObject *res; - Py_UCS1 *outp; - int ressize; - Py_UCS4 c; - + PyObject *obj; + Py_ssize_t objlen, start, end, slen; if (PyObject_TypeCheck(exc, (PyTypeObject *)PyExc_UnicodeDecodeError)) { - const unsigned char *p; - if (PyUnicodeDecodeError_GetStart(exc, &start)) - return NULL; - if (PyUnicodeDecodeError_GetEnd(exc, &end)) - return NULL; - if (!(object = PyUnicodeDecodeError_GetObject(exc))) + if (_PyUnicodeError_GetParams(exc, + &obj, &objlen, + &start, &end, &slen, true) < 0) + { return NULL; - p = (const unsigned char*)PyBytes_AS_STRING(object); - res = PyUnicode_New(4 * (end - start), 127); + } + PyObject *res = PyUnicode_New(4 * slen, 127); if (res == NULL) { - Py_DECREF(object); + Py_DECREF(obj); return NULL; } - outp = 
PyUnicode_1BYTE_DATA(res); - for (i = start; i < end; i++, outp += 4) { - unsigned char c = p[i]; + Py_UCS1 *outp = PyUnicode_1BYTE_DATA(res); + const unsigned char *p = (const unsigned char *)PyBytes_AS_STRING(obj); + for (Py_ssize_t i = start; i < end; i++, outp += 4) { + const unsigned char ch = p[i]; outp[0] = '\\'; outp[1] = 'x'; - outp[2] = Py_hexdigits[(c>>4)&0xf]; - outp[3] = Py_hexdigits[c&0xf]; + outp[2] = Py_hexdigits[(ch >> 4) & 0xf]; + outp[3] = Py_hexdigits[ch & 0xf]; } - assert(_PyUnicode_CheckConsistency(res, 1)); - Py_DECREF(object); + Py_DECREF(obj); return Py_BuildValue("(Nn)", res, end); } - if (PyObject_TypeCheck(exc, (PyTypeObject *)PyExc_UnicodeEncodeError)) { - if (PyUnicodeEncodeError_GetStart(exc, &start)) - return NULL; - if (PyUnicodeEncodeError_GetEnd(exc, &end)) - return NULL; - if (!(object = PyUnicodeEncodeError_GetObject(exc))) - return NULL; - } - else if (PyObject_TypeCheck(exc, (PyTypeObject *)PyExc_UnicodeTranslateError)) { - if (PyUnicodeTranslateError_GetStart(exc, &start)) - return NULL; - if (PyUnicodeTranslateError_GetEnd(exc, &end)) - return NULL; - if (!(object = PyUnicodeTranslateError_GetObject(exc))) + + if ( + PyObject_TypeCheck(exc, (PyTypeObject *)PyExc_UnicodeEncodeError) + || PyObject_TypeCheck(exc, (PyTypeObject *)PyExc_UnicodeTranslateError) + ) { + if (_PyUnicodeError_GetParams(exc, + &obj, &objlen, + &start, &end, &slen, false) < 0) + { return NULL; + } } else { wrong_exception_type(exc); return NULL; } - if (end - start > PY_SSIZE_T_MAX / (1+1+8)) - end = start + PY_SSIZE_T_MAX / (1+1+8); - for (i = start, ressize = 0; i < end; ++i) { + // The number of characters that each character 'ch' contributes + // in the result is 1 + 1 + k, where k >= min{t >= 1 | 16^t > ch} + // and will be formatted as "\\" + ('U'|'u'|'x') + HEXDIGITS, + // where the number of hexdigits is either 2, 4, or 8 (not 6). + // Since the Unicode range is below 10^7, we choose k = 8 whence + // each "block" requires at most 1 + 1 + 8 characters. 
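/*
 * A minimal sketch of the escape-width rule described above: each character
 * is rendered as a backslash, one of 'U'/'u'/'x', and 8, 4 or 2 hex digits.
 * The helper name is hypothetical; the handler below computes the same
 * values inline when sizing and filling the replacement string.
 */
static inline Py_ssize_t
backslashreplace_width(Py_UCS4 ch)
{
    if (ch >= 0x10000) {
        return 1 + 1 + 8;   /* "\U00010000" ... "\U0010FFFF" */
    }
    if (ch >= 0x100) {
        return 1 + 1 + 4;   /* "\u0100" ... "\uFFFF" */
    }
    return 1 + 1 + 2;       /* "\x00" ... "\xFF" */
}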
+ if (slen > PY_SSIZE_T_MAX / (1 + 1 + 8)) { + end = start + PY_SSIZE_T_MAX / (1 + 1 + 8); + end = Py_MIN(end, objlen); + slen = Py_MAX(0, end - start); + } + + Py_ssize_t ressize = 0; + for (Py_ssize_t i = start; i < end; ++i) { /* object is guaranteed to be "ready" */ - c = PyUnicode_READ_CHAR(object, i); + Py_UCS4 c = PyUnicode_READ_CHAR(obj, i); if (c >= 0x10000) { - ressize += 1+1+8; + ressize += 1 + 1 + 8; } else if (c >= 0x100) { - ressize += 1+1+4; + ressize += 1 + 1 + 4; + } + else { + ressize += 1 + 1 + 2; } - else - ressize += 1+1+2; } - res = PyUnicode_New(ressize, 127); + PyObject *res = PyUnicode_New(ressize, 127); if (res == NULL) { - Py_DECREF(object); + Py_DECREF(obj); return NULL; } - outp = PyUnicode_1BYTE_DATA(res); - for (i = start; i < end; ++i) { - c = PyUnicode_READ_CHAR(object, i); + Py_UCS1 *outp = PyUnicode_1BYTE_DATA(res); + for (Py_ssize_t i = start; i < end; ++i) { + Py_UCS4 c = PyUnicode_READ_CHAR(obj, i); *outp++ = '\\'; if (c >= 0x00010000) { *outp++ = 'U'; - *outp++ = Py_hexdigits[(c>>28)&0xf]; - *outp++ = Py_hexdigits[(c>>24)&0xf]; - *outp++ = Py_hexdigits[(c>>20)&0xf]; - *outp++ = Py_hexdigits[(c>>16)&0xf]; - *outp++ = Py_hexdigits[(c>>12)&0xf]; - *outp++ = Py_hexdigits[(c>>8)&0xf]; + *outp++ = Py_hexdigits[(c >> 28) & 0xf]; + *outp++ = Py_hexdigits[(c >> 24) & 0xf]; + *outp++ = Py_hexdigits[(c >> 20) & 0xf]; + *outp++ = Py_hexdigits[(c >> 16) & 0xf]; + *outp++ = Py_hexdigits[(c >> 12) & 0xf]; + *outp++ = Py_hexdigits[(c >> 8) & 0xf]; } else if (c >= 0x100) { *outp++ = 'u'; - *outp++ = Py_hexdigits[(c>>12)&0xf]; - *outp++ = Py_hexdigits[(c>>8)&0xf]; + *outp++ = Py_hexdigits[(c >> 12) & 0xf]; + *outp++ = Py_hexdigits[(c >> 8) & 0xf]; } - else + else { *outp++ = 'x'; - *outp++ = Py_hexdigits[(c>>4)&0xf]; - *outp++ = Py_hexdigits[c&0xf]; + } + *outp++ = Py_hexdigits[(c >> 4) & 0xf]; + *outp++ = Py_hexdigits[c & 0xf]; } - assert(_PyUnicode_CheckConsistency(res, 1)); - Py_DECREF(object); + Py_DECREF(obj); return Py_BuildValue("(Nn)", res, end); } @@ -1358,13 +1388,17 @@ PyCodec_SurrogateEscapeErrors(PyObject *exc) } -static PyObject *strict_errors(PyObject *self, PyObject *exc) +// --- Codecs registry handlers ----------------------------------------------- + +static inline PyObject * +strict_errors(PyObject *Py_UNUSED(self), PyObject *exc) { return PyCodec_StrictErrors(exc); } -static PyObject *ignore_errors(PyObject *self, PyObject *exc) +static inline PyObject * +ignore_errors(PyObject *Py_UNUSED(self), PyObject *exc) { return PyCodec_IgnoreErrors(exc); } diff --git a/Python/codegen.c b/Python/codegen.c index 7432415b17414e..0bf9526cdc8435 100644 --- a/Python/codegen.c +++ b/Python/codegen.c @@ -201,9 +201,6 @@ static int codegen_subscript(compiler *, expr_ty); static int codegen_slice_two_parts(compiler *, expr_ty); static int codegen_slice(compiler *, expr_ty); -static bool are_all_items_const(asdl_expr_seq *, Py_ssize_t, Py_ssize_t); - - static int codegen_with(compiler *, stmt_ty, int); static int codegen_async_with(compiler *, stmt_ty, int); static int codegen_async_for(compiler *, stmt_ty); @@ -406,13 +403,7 @@ codegen_addop_j(instr_sequence *seq, location loc, assert(IS_JUMP_TARGET_LABEL(target)); assert(OPCODE_HAS_JUMP(opcode) || IS_BLOCK_PUSH_OPCODE(opcode)); assert(!IS_ASSEMBLER_OPCODE(opcode)); - if (_PyInstructionSequence_Addop(seq, opcode, target.id, loc) != SUCCESS) { - return ERROR; - } - if (IS_CONDITIONAL_JUMP_OPCODE(opcode) || opcode == FOR_ITER) { - return _PyInstructionSequence_Addop(seq, NOT_TAKEN, 0, NO_LOCATION); - } - return 
SUCCESS; + return _PyInstructionSequence_Addop(seq, opcode, target.id, loc); } #define ADDOP_JUMP(C, LOC, OP, O) \ @@ -2018,7 +2009,7 @@ codegen_for(compiler *c, stmt_ty s) * but a non-generator will jump to a later instruction. */ ADDOP(c, NO_LOCATION, END_FOR); - ADDOP(c, NO_LOCATION, POP_TOP); + ADDOP(c, NO_LOCATION, POP_ITER); _PyCompile_PopFBlock(c, COMPILE_FBLOCK_FOR_LOOP, start); @@ -3216,34 +3207,6 @@ starunpack_helper_impl(compiler *c, location loc, int build, int add, int extend, int tuple) { Py_ssize_t n = asdl_seq_LEN(elts); - if (!injected_arg && n > 2 && are_all_items_const(elts, 0, n)) { - PyObject *folded = PyTuple_New(n); - if (folded == NULL) { - return ERROR; - } - for (Py_ssize_t i = 0; i < n; i++) { - PyObject *val = ((expr_ty)asdl_seq_GET(elts, i))->v.Constant.value; - PyTuple_SET_ITEM(folded, i, Py_NewRef(val)); - } - if (tuple && !pushed) { - ADDOP_LOAD_CONST_NEW(c, loc, folded); - } else { - if (add == SET_ADD) { - Py_SETREF(folded, PyFrozenSet_New(folded)); - if (folded == NULL) { - return ERROR; - } - } - ADDOP_I(c, loc, build, pushed); - ADDOP_LOAD_CONST_NEW(c, loc, folded); - ADDOP_I(c, loc, extend, 1); - if (tuple) { - ADDOP_I(c, loc, CALL_INTRINSIC_1, INTRINSIC_LIST_TO_TUPLE); - } - } - return SUCCESS; - } - int big = n + pushed + (injected_arg ? 1 : 0) > STACK_USE_GUIDELINE; int seen_star = 0; for (Py_ssize_t i = 0; i < n; i++) { @@ -3395,18 +3358,6 @@ codegen_set(compiler *c, expr_ty e) BUILD_SET, SET_ADD, SET_UPDATE, 0); } -static bool -are_all_items_const(asdl_expr_seq *seq, Py_ssize_t begin, Py_ssize_t end) -{ - for (Py_ssize_t i = begin; i < end; i++) { - expr_ty key = (expr_ty)asdl_seq_GET(seq, i); - if (key == NULL || key->kind != Constant_kind) { - return false; - } - } - return true; -} - static int codegen_subdict(compiler *c, expr_ty e, Py_ssize_t begin, Py_ssize_t end) { @@ -4114,7 +4065,10 @@ codegen_call_helper_impl(compiler *c, location loc, } assert(have_dict); } - ADDOP_I(c, loc, CALL_FUNCTION_EX, nkwelts > 0); + if (nkwelts == 0) { + ADDOP(c, loc, PUSH_NULL); + } + ADDOP(c, loc, CALL_FUNCTION_EX); return SUCCESS; } @@ -4283,7 +4237,7 @@ codegen_sync_comprehension_generator(compiler *c, location loc, * but a non-generator will jump to a later instruction. 
*/ ADDOP(c, NO_LOCATION, END_FOR); - ADDOP(c, NO_LOCATION, POP_TOP); + ADDOP(c, NO_LOCATION, POP_ITER); } return SUCCESS; diff --git a/Python/compile.c b/Python/compile.c index ef470830336dde..b58c12d4b881ac 100644 --- a/Python/compile.c +++ b/Python/compile.c @@ -704,12 +704,12 @@ _PyCompile_ExitScope(compiler *c) assert(c->u); /* we are deleting from a list so this really shouldn't fail */ if (PySequence_DelItem(c->c_stack, n) < 0) { - PyErr_FormatUnraisable("Exception ignored on removing " + PyErr_FormatUnraisable("Exception ignored while removing " "the last compiler stack item"); } if (nested_seq != NULL) { if (_PyInstructionSequence_AddNested(c->u->u_instr_sequence, nested_seq) < 0) { - PyErr_FormatUnraisable("Exception ignored on appending " + PyErr_FormatUnraisable("Exception ignored while appending " "nested instruction sequence"); } } diff --git a/Python/context.c b/Python/context.c index 95aa82206270f9..bb1aa42b9c5e4f 100644 --- a/Python/context.c +++ b/Python/context.c @@ -419,6 +419,9 @@ class _contextvars.Context "PyContext *" "&PyContext_Type" /*[clinic end generated code: output=da39a3ee5e6b4b0d input=bdf87f8e0cb580e8]*/ +#define _PyContext_CAST(op) ((PyContext *)(op)) + + static inline PyContext * _context_alloc(void) { @@ -513,28 +516,30 @@ context_tp_new(PyTypeObject *type, PyObject *args, PyObject *kwds) } static int -context_tp_clear(PyContext *self) +context_tp_clear(PyObject *op) { + PyContext *self = _PyContext_CAST(op); Py_CLEAR(self->ctx_prev); Py_CLEAR(self->ctx_vars); return 0; } static int -context_tp_traverse(PyContext *self, visitproc visit, void *arg) +context_tp_traverse(PyObject *op, visitproc visit, void *arg) { + PyContext *self = _PyContext_CAST(op); Py_VISIT(self->ctx_prev); Py_VISIT(self->ctx_vars); return 0; } static void -context_tp_dealloc(PyContext *self) +context_tp_dealloc(PyObject *self) { _PyObject_GC_UNTRACK(self); - - if (self->ctx_weakreflist != NULL) { - PyObject_ClearWeakRefs((PyObject*)self); + PyContext *ctx = _PyContext_CAST(self); + if (ctx->ctx_weakreflist != NULL) { + PyObject_ClearWeakRefs(self); } (void)context_tp_clear(self); @@ -542,8 +547,9 @@ context_tp_dealloc(PyContext *self) } static PyObject * -context_tp_iter(PyContext *self) +context_tp_iter(PyObject *op) { + PyContext *self = _PyContext_CAST(op); return _PyHamt_NewIterKeys(self->ctx_vars); } @@ -575,18 +581,20 @@ context_tp_richcompare(PyObject *v, PyObject *w, int op) } static Py_ssize_t -context_tp_len(PyContext *self) +context_tp_len(PyObject *op) { + PyContext *self = _PyContext_CAST(op); return _PyHamt_Len(self->ctx_vars); } static PyObject * -context_tp_subscript(PyContext *self, PyObject *key) +context_tp_subscript(PyObject *op, PyObject *key) { if (context_check_key_type(key)) { return NULL; } PyObject *val = NULL; + PyContext *self = _PyContext_CAST(op); int found = _PyHamt_Find(self->ctx_vars, key, &val); if (found < 0) { return NULL; @@ -599,12 +607,13 @@ context_tp_subscript(PyContext *self, PyObject *key) } static int -context_tp_contains(PyContext *self, PyObject *key) +context_tp_contains(PyObject *op, PyObject *key) { if (context_check_key_type(key)) { return -1; } PyObject *val = NULL; + PyContext *self = _PyContext_CAST(op); return _PyHamt_Find(self->ctx_vars, key, &val); } @@ -701,7 +710,7 @@ _contextvars_Context_copy_impl(PyContext *self) static PyObject * -context_run(PyContext *self, PyObject *const *args, +context_run(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyThreadState *ts = _PyThreadState_GET(); @@ -712,14 
+721,14 @@ context_run(PyContext *self, PyObject *const *args, return NULL; } - if (_PyContext_Enter(ts, (PyObject *)self)) { + if (_PyContext_Enter(ts, self)) { return NULL; } PyObject *call_result = _PyObject_VectorcallTstate( ts, args[0], args + 1, nargs - 1, kwnames); - if (_PyContext_Exit(ts, (PyObject *)self)) { + if (_PyContext_Exit(ts, self)) { Py_XDECREF(call_result); return NULL; } @@ -739,21 +748,12 @@ static PyMethodDef PyContext_methods[] = { }; static PySequenceMethods PyContext_as_sequence = { - 0, /* sq_length */ - 0, /* sq_concat */ - 0, /* sq_repeat */ - 0, /* sq_item */ - 0, /* sq_slice */ - 0, /* sq_ass_item */ - 0, /* sq_ass_slice */ - (objobjproc)context_tp_contains, /* sq_contains */ - 0, /* sq_inplace_concat */ - 0, /* sq_inplace_repeat */ + .sq_contains = context_tp_contains }; static PyMappingMethods PyContext_as_mapping = { - (lenfunc)context_tp_len, /* mp_length */ - (binaryfunc)context_tp_subscript, /* mp_subscript */ + .mp_length = context_tp_len, + .mp_subscript = context_tp_subscript }; PyTypeObject PyContext_Type = { @@ -763,13 +763,13 @@ PyTypeObject PyContext_Type = { .tp_methods = PyContext_methods, .tp_as_mapping = &PyContext_as_mapping, .tp_as_sequence = &PyContext_as_sequence, - .tp_iter = (getiterfunc)context_tp_iter, - .tp_dealloc = (destructor)context_tp_dealloc, + .tp_iter = context_tp_iter, + .tp_dealloc = context_tp_dealloc, .tp_getattro = PyObject_GenericGetAttr, .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, .tp_richcompare = context_tp_richcompare, - .tp_traverse = (traverseproc)context_tp_traverse, - .tp_clear = (inquiry)context_tp_clear, + .tp_traverse = context_tp_traverse, + .tp_clear = context_tp_clear, .tp_new = context_tp_new, .tp_weaklistoffset = offsetof(PyContext, ctx_weakreflist), .tp_hash = PyObject_HashNotImplemented, @@ -860,7 +860,7 @@ contextvar_generate_hash(void *addr, PyObject *name) return -1; } - Py_hash_t res = _Py_HashPointer(addr) ^ name_hash; + Py_hash_t res = Py_HashPointer(addr) ^ name_hash; return res == -1 ? 
-2 : res; } @@ -909,6 +909,9 @@ class _contextvars.ContextVar "PyContextVar *" "&PyContextVar_Type" /*[clinic end generated code: output=da39a3ee5e6b4b0d input=445da935fa8883c3]*/ +#define _PyContextVar_CAST(op) ((PyContextVar *)(op)) + + static PyObject * contextvar_tp_new(PyTypeObject *type, PyObject *args, PyObject *kwds) { @@ -926,8 +929,9 @@ contextvar_tp_new(PyTypeObject *type, PyObject *args, PyObject *kwds) } static int -contextvar_tp_clear(PyContextVar *self) +contextvar_tp_clear(PyObject *op) { + PyContextVar *self = _PyContextVar_CAST(op); Py_CLEAR(self->var_name); Py_CLEAR(self->var_default); #ifndef Py_GIL_DISABLED @@ -939,15 +943,16 @@ contextvar_tp_clear(PyContextVar *self) } static int -contextvar_tp_traverse(PyContextVar *self, visitproc visit, void *arg) +contextvar_tp_traverse(PyObject *op, visitproc visit, void *arg) { + PyContextVar *self = _PyContextVar_CAST(op); Py_VISIT(self->var_name); Py_VISIT(self->var_default); return 0; } static void -contextvar_tp_dealloc(PyContextVar *self) +contextvar_tp_dealloc(PyObject *self) { PyObject_GC_UnTrack(self); (void)contextvar_tp_clear(self); @@ -955,14 +960,16 @@ contextvar_tp_dealloc(PyContextVar *self) } static Py_hash_t -contextvar_tp_hash(PyContextVar *self) +contextvar_tp_hash(PyObject *op) { + PyContextVar *self = _PyContextVar_CAST(op); return self->var_hash; } static PyObject * -contextvar_tp_repr(PyContextVar *self) +contextvar_tp_repr(PyObject *op) { + PyContextVar *self = _PyContextVar_CAST(op); // Estimation based on the shortest name and default value, // but maximize the pointer size. // "" @@ -1106,15 +1113,15 @@ PyTypeObject PyContextVar_Type = { sizeof(PyContextVar), .tp_methods = PyContextVar_methods, .tp_members = PyContextVar_members, - .tp_dealloc = (destructor)contextvar_tp_dealloc, + .tp_dealloc = contextvar_tp_dealloc, .tp_getattro = PyObject_GenericGetAttr, .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, - .tp_traverse = (traverseproc)contextvar_tp_traverse, - .tp_clear = (inquiry)contextvar_tp_clear, + .tp_traverse = contextvar_tp_traverse, + .tp_clear = contextvar_tp_clear, .tp_new = contextvar_tp_new, .tp_free = PyObject_GC_Del, - .tp_hash = (hashfunc)contextvar_tp_hash, - .tp_repr = (reprfunc)contextvar_tp_repr, + .tp_hash = contextvar_tp_hash, + .tp_repr = contextvar_tp_repr, }; @@ -1129,6 +1136,9 @@ class _contextvars.Token "PyContextToken *" "&PyContextToken_Type" /*[clinic end generated code: output=da39a3ee5e6b4b0d input=338a5e2db13d3f5b]*/ +#define _PyContextToken_CAST(op) ((PyContextToken *)(op)) + + static PyObject * token_tp_new(PyTypeObject *type, PyObject *args, PyObject *kwds) { @@ -1138,8 +1148,9 @@ token_tp_new(PyTypeObject *type, PyObject *args, PyObject *kwds) } static int -token_tp_clear(PyContextToken *self) +token_tp_clear(PyObject *op) { + PyContextToken *self = _PyContextToken_CAST(op); Py_CLEAR(self->tok_ctx); Py_CLEAR(self->tok_var); Py_CLEAR(self->tok_oldval); @@ -1147,8 +1158,9 @@ token_tp_clear(PyContextToken *self) } static int -token_tp_traverse(PyContextToken *self, visitproc visit, void *arg) +token_tp_traverse(PyObject *op, visitproc visit, void *arg) { + PyContextToken *self = _PyContextToken_CAST(op); Py_VISIT(self->tok_ctx); Py_VISIT(self->tok_var); Py_VISIT(self->tok_oldval); @@ -1156,7 +1168,7 @@ token_tp_traverse(PyContextToken *self, visitproc visit, void *arg) } static void -token_tp_dealloc(PyContextToken *self) +token_tp_dealloc(PyObject *self) { PyObject_GC_UnTrack(self); (void)token_tp_clear(self); @@ -1164,8 +1176,9 @@ token_tp_dealloc(PyContextToken 
*self) } static PyObject * -token_tp_repr(PyContextToken *self) +token_tp_repr(PyObject *op) { + PyContextToken *self = _PyContextToken_CAST(op); PyUnicodeWriter *writer = PyUnicodeWriter_Create(0); if (writer == NULL) { return NULL; @@ -1195,14 +1208,16 @@ token_tp_repr(PyContextToken *self) } static PyObject * -token_get_var(PyContextToken *self, void *Py_UNUSED(ignored)) +token_get_var(PyObject *op, void *Py_UNUSED(ignored)) { + PyContextToken *self = _PyContextToken_CAST(op); return Py_NewRef(self->tok_var);; } static PyObject * -token_get_old_value(PyContextToken *self, void *Py_UNUSED(ignored)) +token_get_old_value(PyObject *op, void *Py_UNUSED(ignored)) { + PyContextToken *self = _PyContextToken_CAST(op); if (self->tok_oldval == NULL) { return get_token_missing(); } @@ -1211,8 +1226,8 @@ token_get_old_value(PyContextToken *self, void *Py_UNUSED(ignored)) } static PyGetSetDef PyContextTokenType_getsetlist[] = { - {"var", (getter)token_get_var, NULL, NULL}, - {"old_value", (getter)token_get_old_value, NULL, NULL}, + {"var", token_get_var, NULL, NULL}, + {"old_value", token_get_old_value, NULL, NULL}, {NULL} }; @@ -1228,15 +1243,15 @@ PyTypeObject PyContextToken_Type = { sizeof(PyContextToken), .tp_methods = PyContextTokenType_methods, .tp_getset = PyContextTokenType_getsetlist, - .tp_dealloc = (destructor)token_tp_dealloc, + .tp_dealloc = token_tp_dealloc, .tp_getattro = PyObject_GenericGetAttr, .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, - .tp_traverse = (traverseproc)token_tp_traverse, - .tp_clear = (inquiry)token_tp_clear, + .tp_traverse = token_tp_traverse, + .tp_clear = token_tp_clear, .tp_new = token_tp_new, .tp_free = PyObject_GC_Del, .tp_hash = PyObject_HashNotImplemented, - .tp_repr = (reprfunc)token_tp_repr, + .tp_repr = token_tp_repr, }; static PyContextToken * @@ -1270,7 +1285,7 @@ context_token_missing_tp_repr(PyObject *self) } static void -context_token_missing_tp_dealloc(_PyContextTokenMissing *Py_UNUSED(self)) +context_token_missing_tp_dealloc(PyObject *Py_UNUSED(self)) { #ifdef Py_DEBUG /* The singleton is statically allocated. 
*/ @@ -1285,7 +1300,7 @@ PyTypeObject _PyContextTokenMissing_Type = { PyVarObject_HEAD_INIT(&PyType_Type, 0) "Token.MISSING", sizeof(_PyContextTokenMissing), - .tp_dealloc = (destructor)context_token_missing_tp_dealloc, + .tp_dealloc = context_token_missing_tp_dealloc, .tp_getattro = PyObject_GenericGetAttr, .tp_flags = Py_TPFLAGS_DEFAULT, .tp_repr = context_token_missing_tp_repr, diff --git a/Python/crossinterp.c b/Python/crossinterp.c index 0a106ad636bfe8..7eb5bc267487d1 100644 --- a/Python/crossinterp.c +++ b/Python/crossinterp.c @@ -784,7 +784,8 @@ _PyXI_excinfo_Apply(_PyXI_excinfo *info, PyObject *exctype) PyObject *exc = PyErr_GetRaisedException(); if (PyObject_SetAttrString(exc, "_errdisplay", tbexc) < 0) { #ifdef Py_DEBUG - PyErr_FormatUnraisable("Exception ignored when setting _errdisplay"); + PyErr_FormatUnraisable("Exception ignored while " + "setting _errdisplay"); #endif PyErr_Clear(); } diff --git a/Python/emscripten_trampoline.c b/Python/emscripten_trampoline.c index 960c6b4a2ef995..0293c7ea0f37ad 100644 --- a/Python/emscripten_trampoline.c +++ b/Python/emscripten_trampoline.c @@ -1,79 +1,203 @@ #if defined(PY_CALL_TRAMPOLINE) -#include // EM_JS +#include // EM_JS, EM_JS_DEPS #include #include "pycore_runtime.h" // _PyRuntime +typedef int (*CountArgsFunc)(PyCFunctionWithKeywords func); -/** - * This is the GoogleChromeLabs approved way to feature detect type-reflection: - * https://github.com/GoogleChromeLabs/wasm-feature-detect/blob/main/src/detectors/type-reflection/index.js - */ -EM_JS(int, _PyEM_detect_type_reflection, (), { - if (!("Function" in WebAssembly)) { - return false; - } - if (WebAssembly.Function.type) { - // Node v20 - Module.PyEM_CountArgs = (func) => WebAssembly.Function.type(wasmTable.get(func)).parameters.length; - } else { - // Node >= 22, v8-based browsers - Module.PyEM_CountArgs = (func) => wasmTable.get(func).type().parameters.length; +// Offset of emscripten_count_args_function in _PyRuntimeState. There's a couple +// of alternatives: +// 1. Just make emscripten_count_args_function a real C global variable instead +// of a field of _PyRuntimeState. This would violate our rule against mutable +// globals. +// 2. #define a preprocessor constant equal to a hard coded number and make a +// _Static_assert(offsetof(_PyRuntimeState, emscripten_count_args_function) +// == OURCONSTANT) This has the disadvantage that we have to update the hard +// coded constant when _PyRuntimeState changes +// +// So putting the mutable constant in _PyRuntime and using a immutable global to +// record the offset so we can access it from JS is probably the best way. +EMSCRIPTEN_KEEPALIVE const int _PyEM_EMSCRIPTEN_COUNT_ARGS_OFFSET = offsetof(_PyRuntimeState, emscripten_count_args_function); + +EM_JS(CountArgsFunc, _PyEM_GetCountArgsPtr, (), { + return Module._PyEM_CountArgsPtr; // initialized below +} +// Binary module for the checks. It has to be done in web assembly because +// clang/llvm have no support yet for the reference types yet. In fact, the wasm +// binary toolkit doesn't yet support the ref.test instruction either. To +// convert the following textual wasm to a binary, you can build wabt from this +// branch: https://github.com/WebAssembly/wabt/pull/2529 and then use that +// wat2wasm binary. 
+// +// (module +// (type $type0 (func (param) (result i32))) +// (type $type1 (func (param i32) (result i32))) +// (type $type2 (func (param i32 i32) (result i32))) +// (type $type3 (func (param i32 i32 i32) (result i32))) +// (type $blocktype (func (param i32) (result))) +// (table $funcs (import "e" "t") 0 funcref) +// (export "f" (func $f)) +// (func $f (param $fptr i32) (result i32) +// (local $fref funcref) +// local.get $fptr +// table.get $funcs +// local.tee $fref +// ref.test $type3 +// (block $b (type $blocktype) +// i32.eqz +// br_if $b +// i32.const 3 +// return +// ) +// local.get $fref +// ref.test $type2 +// (block $b (type $blocktype) +// i32.eqz +// br_if $b +// i32.const 2 +// return +// ) +// local.get $fref +// ref.test $type1 +// (block $b (type $blocktype) +// i32.eqz +// br_if $b +// i32.const 1 +// return +// ) +// local.get $fref +// ref.test $type0 +// (block $b (type $blocktype) +// i32.eqz +// br_if $b +// i32.const 0 +// return +// ) +// i32.const -1 +// ) +// ) +addOnPreRun(() => { + // Try to initialize countArgsFunc + const code = new Uint8Array([ + 0x00, 0x61, 0x73, 0x6d, // \0asm magic number + 0x01, 0x00, 0x00, 0x00, // version 1 + 0x01, 0x1b, // Type section, body is 0x1b bytes + 0x05, // 6 entries + 0x60, 0x00, 0x01, 0x7f, // (type $type0 (func (param) (result i32))) + 0x60, 0x01, 0x7f, 0x01, 0x7f, // (type $type1 (func (param i32) (result i32))) + 0x60, 0x02, 0x7f, 0x7f, 0x01, 0x7f, // (type $type2 (func (param i32 i32) (result i32))) + 0x60, 0x03, 0x7f, 0x7f, 0x7f, 0x01, 0x7f, // (type $type3 (func (param i32 i32 i32) (result i32))) + 0x60, 0x01, 0x7f, 0x00, // (type $blocktype (func (param i32) (result))) + 0x02, 0x09, // Import section, 0x9 byte body + 0x01, // 1 import (table $funcs (import "e" "t") 0 funcref) + 0x01, 0x65, // "e" + 0x01, 0x74, // "t" + 0x01, // importing a table + 0x70, // of entry type funcref + 0x00, 0x00, // table limits: no max, min of 0 + 0x03, 0x02, // Function section + 0x01, 0x01, // We're going to define one function of type 1 (func (param i32) (result i32)) + 0x07, 0x05, // export section + 0x01, // 1 export + 0x01, 0x66, // called "f" + 0x00, // a function + 0x00, // at index 0 + + 0x0a, 0x44, // Code section, + 0x01, 0x42, // one entry of length 50 + 0x01, 0x01, 0x70, // one local of type funcref + // Body of the function + 0x20, 0x00, // local.get $fptr + 0x25, 0x00, // table.get $funcs + 0x22, 0x01, // local.tee $fref + 0xfb, 0x14, 0x03, // ref.test $type3 + 0x02, 0x04, // block $b (type $blocktype) + 0x45, // i32.eqz + 0x0d, 0x00, // br_if $b + 0x41, 0x03, // i32.const 3 + 0x0f, // return + 0x0b, // end block + + 0x20, 0x01, // local.get $fref + 0xfb, 0x14, 0x02, // ref.test $type2 + 0x02, 0x04, // block $b (type $blocktype) + 0x45, // i32.eqz + 0x0d, 0x00, // br_if $b + 0x41, 0x02, // i32.const 2 + 0x0f, // return + 0x0b, // end block + + 0x20, 0x01, // local.get $fref + 0xfb, 0x14, 0x01, // ref.test $type1 + 0x02, 0x04, // block $b (type $blocktype) + 0x45, // i32.eqz + 0x0d, 0x00, // br_if $b + 0x41, 0x01, // i32.const 1 + 0x0f, // return + 0x0b, // end block + + 0x20, 0x01, // local.get $fref + 0xfb, 0x14, 0x00, // ref.test $type0 + 0x02, 0x04, // block $b (type $blocktype) + 0x45, // i32.eqz + 0x0d, 0x00, // br_if $b + 0x41, 0x00, // i32.const 0 + 0x0f, // return + 0x0b, // end block + + 0x41, 0x7f, // i32.const -1 + 0x0b // end function + ]); + let ptr = 0; + try { + const mod = new WebAssembly.Module(code); + const inst = new WebAssembly.Instance(mod, { e: { t: wasmTable } }); + ptr = 
addFunction(inst.exports.f); + } catch (e) { + // If something goes wrong, we'll null out _PyEM_CountFuncParams and fall + // back to the JS trampoline. } - return true; + Module._PyEM_CountArgsPtr = ptr; + const offset = HEAP32[__PyEM_EMSCRIPTEN_COUNT_ARGS_OFFSET / 4]; + HEAP32[(__PyRuntime + offset) / 4] = ptr; }); +); void _Py_EmscriptenTrampoline_Init(_PyRuntimeState *runtime) { - runtime->wasm_type_reflection_available = _PyEM_detect_type_reflection(); + runtime->emscripten_count_args_function = _PyEM_GetCountArgsPtr(); } +// We have to be careful to work correctly with memory snapshots. Even if we are +// loading a memory snapshot, we need to perform the JS initialization work. +// That means we can't call the initialization code from C. Instead, we export +// this function pointer to JS and then fill it in a preRun function which runs +// unconditionally. /** * Backwards compatible trampoline works with all JS runtimes */ -EM_JS(PyObject*, -_PyEM_TrampolineCall_JavaScript, (PyCFunctionWithKeywords func, - PyObject *arg1, - PyObject *arg2, - PyObject *arg3), -{ +EM_JS(PyObject*, _PyEM_TrampolineCall_JS, (PyCFunctionWithKeywords func, PyObject *arg1, PyObject *arg2, PyObject *arg3), { return wasmTable.get(func)(arg1, arg2, arg3); -} -); - -/** - * In runtimes with WebAssembly type reflection, count the number of parameters - * and cast to the appropriate signature - */ -EM_JS(int, _PyEM_CountFuncParams, (PyCFunctionWithKeywords func), -{ - let n = _PyEM_CountFuncParams.cache.get(func); - - if (n !== undefined) { - return n; - } - n = Module.PyEM_CountArgs(func); - _PyEM_CountFuncParams.cache.set(func, n); - return n; -} -_PyEM_CountFuncParams.cache = new Map(); -) - +}); typedef PyObject* (*zero_arg)(void); typedef PyObject* (*one_arg)(PyObject*); typedef PyObject* (*two_arg)(PyObject*, PyObject*); typedef PyObject* (*three_arg)(PyObject*, PyObject*, PyObject*); - PyObject* -_PyEM_TrampolineCall_Reflection(PyCFunctionWithKeywords func, - PyObject* self, - PyObject* args, - PyObject* kw) +_PyEM_TrampolineCall(PyCFunctionWithKeywords func, + PyObject* self, + PyObject* args, + PyObject* kw) { - switch (_PyEM_CountFuncParams(func)) { + CountArgsFunc count_args = _PyRuntime.emscripten_count_args_function; + if (count_args == 0) { + return _PyEM_TrampolineCall_JS(func, self, args, kw); + } + switch (count_args(func)) { case 0: return ((zero_arg)func)(); case 1: @@ -83,8 +207,7 @@ _PyEM_TrampolineCall_Reflection(PyCFunctionWithKeywords func, case 3: return ((three_arg)func)(self, args, kw); default: - PyErr_SetString(PyExc_SystemError, - "Handler takes too many arguments"); + PyErr_SetString(PyExc_SystemError, "Handler takes too many arguments"); return NULL; } } diff --git a/Python/errors.c b/Python/errors.c index 2d362c1864ffff..0a19d898da75d7 100644 --- a/Python/errors.c +++ b/Python/errors.c @@ -314,8 +314,8 @@ _PyErr_SetLocaleString(PyObject *exception, const char *string) PyObject* _Py_HOT_FUNCTION PyErr_Occurred(void) { - /* The caller must hold the GIL. */ - assert(PyGILState_Check()); + /* The caller must hold a thread state. 
*/ + _Py_AssertHoldsTstate(); PyThreadState *tstate = _PyThreadState_GET(); return _PyErr_Occurred(tstate); @@ -1633,7 +1633,7 @@ format_unraisable_v(const char *format, va_list va, PyObject *obj) PyObject *hook_args = make_unraisable_hook_args( tstate, exc_type, exc_value, exc_tb, err_msg, obj); if (hook_args == NULL) { - err_msg_str = ("Exception ignored on building " + err_msg_str = ("Exception ignored while building " "sys.unraisablehook arguments"); goto error; } @@ -1981,7 +1981,7 @@ _PyErr_ProgramDecodedTextObject(PyObject *filename, int lineno, const char* enco return NULL; } - FILE *fp = _Py_fopen_obj(filename, "r" PY_STDIOTEXTMODE); + FILE *fp = Py_fopen(filename, "r" PY_STDIOTEXTMODE); if (fp == NULL) { PyErr_Clear(); return NULL; diff --git a/Python/executor_cases.c.h b/Python/executor_cases.c.h index f7374d52705960..c1c2c8fda20a7a 100644 --- a/Python/executor_cases.c.h +++ b/Python/executor_cases.c.h @@ -19,7 +19,9 @@ _PyFrame_SetStackPointer(frame, stack_pointer); int err = _Py_HandlePending(tstate); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err != 0) JUMP_TO_ERROR(); + if (err != 0) { + JUMP_TO_ERROR(); + } } break; } @@ -33,7 +35,9 @@ _PyFrame_SetStackPointer(frame, stack_pointer); int err = _Py_HandlePending(tstate); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err != 0) JUMP_TO_ERROR(); + if (err != 0) { + JUMP_TO_ERROR(); + } } } break; @@ -81,7 +85,7 @@ PyTuple_GetItem(_PyFrame_GetCode(frame)->co_localsplusnames, oparg) ); stack_pointer = _PyFrame_GetStackPointer(frame); - if (1) JUMP_TO_ERROR(); + JUMP_TO_ERROR(); } value = PyStackRef_DUP(value_s); stack_pointer[0] = value; @@ -209,10 +213,13 @@ break; } - case _LOAD_CONST: { + /* _LOAD_CONST is not a viable micro-op for tier 2 because it uses the 'this_instr' variable */ + + case _LOAD_CONST_MORTAL: { _PyStackRef value; oparg = CURRENT_OPARG(); - value = PyStackRef_FromPyObjectNew(GETITEM(FRAME_CO_CONSTS, oparg)); + PyObject *obj = GETITEM(FRAME_CO_CONSTS, oparg); + value = PyStackRef_FromPyObjectNew(obj); stack_pointer[0] = value; stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); @@ -411,6 +418,22 @@ break; } + case _END_FOR: { + _PyStackRef value; + value = stack_pointer[-1]; + /* Don't update instr_ptr, so that POP_ITER sees + * the FOR_ITER as the previous instruction. + * This has the benign side effect that if value is + * finalized it will see the location as the FOR_ITER's. + */ + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_CLOSE(value); + stack_pointer = _PyFrame_GetStackPointer(frame); + break; + } + case _END_SEND: { _PyStackRef value; _PyStackRef receiver; @@ -434,7 +457,11 @@ PyObject *res_o = PyNumber_Negative(PyStackRef_AsPyObjectBorrow(value)); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(value); - if (res_o == NULL) JUMP_TO_ERROR(); + if (res_o == NULL) { + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-1] = res; break; @@ -459,7 +486,11 @@ int err = PyObject_IsTrue(PyStackRef_AsPyObjectBorrow(value)); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(value); - if (err < 0) JUMP_TO_ERROR(); + if (err < 0) { + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } res = err ? 
PyStackRef_True : PyStackRef_False; stack_pointer[-1] = res; break; @@ -570,7 +601,11 @@ PyObject *res_o = PyNumber_Invert(PyStackRef_AsPyObjectBorrow(value)); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(value); - if (res_o == NULL) JUMP_TO_ERROR(); + if (res_o == NULL) { + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-1] = res; break; @@ -624,11 +659,17 @@ left = stack_pointer[-2]; PyObject *left_o = PyStackRef_AsPyObjectBorrow(left); PyObject *right_o = PyStackRef_AsPyObjectBorrow(right); + assert(PyLong_CheckExact(left_o)); + assert(PyLong_CheckExact(right_o)); STAT_INC(BINARY_OP, hit); PyObject *res_o = _PyLong_Multiply((PyLongObject *)left_o, (PyLongObject *)right_o); PyStackRef_CLOSE_SPECIALIZED(right, _PyLong_ExactDealloc); PyStackRef_CLOSE_SPECIALIZED(left, _PyLong_ExactDealloc); - if (res_o == NULL) JUMP_TO_ERROR(); + if (res_o == NULL) { + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-2] = res; stack_pointer += -1; @@ -644,11 +685,17 @@ left = stack_pointer[-2]; PyObject *left_o = PyStackRef_AsPyObjectBorrow(left); PyObject *right_o = PyStackRef_AsPyObjectBorrow(right); + assert(PyLong_CheckExact(left_o)); + assert(PyLong_CheckExact(right_o)); STAT_INC(BINARY_OP, hit); PyObject *res_o = _PyLong_Add((PyLongObject *)left_o, (PyLongObject *)right_o); PyStackRef_CLOSE_SPECIALIZED(right, _PyLong_ExactDealloc); PyStackRef_CLOSE_SPECIALIZED(left, _PyLong_ExactDealloc); - if (res_o == NULL) JUMP_TO_ERROR(); + if (res_o == NULL) { + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-2] = res; stack_pointer += -1; @@ -664,11 +711,17 @@ left = stack_pointer[-2]; PyObject *left_o = PyStackRef_AsPyObjectBorrow(left); PyObject *right_o = PyStackRef_AsPyObjectBorrow(right); + assert(PyLong_CheckExact(left_o)); + assert(PyLong_CheckExact(right_o)); STAT_INC(BINARY_OP, hit); PyObject *res_o = _PyLong_Subtract((PyLongObject *)left_o, (PyLongObject *)right_o); PyStackRef_CLOSE_SPECIALIZED(right, _PyLong_ExactDealloc); PyStackRef_CLOSE_SPECIALIZED(left, _PyLong_ExactDealloc); - if (res_o == NULL) JUMP_TO_ERROR(); + if (res_o == NULL) { + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-2] = res; stack_pointer += -1; @@ -724,12 +777,18 @@ left = stack_pointer[-2]; PyObject *left_o = PyStackRef_AsPyObjectBorrow(left); PyObject *right_o = PyStackRef_AsPyObjectBorrow(right); + assert(PyFloat_CheckExact(left_o)); + assert(PyFloat_CheckExact(right_o)); STAT_INC(BINARY_OP, hit); double dres = ((PyFloatObject *)left_o)->ob_fval * ((PyFloatObject *)right_o)->ob_fval; PyObject *res_o = _PyFloat_FromDouble_ConsumeInputs(left, right, dres); - if (res_o == NULL) JUMP_TO_ERROR(); + if (res_o == NULL) { + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-2] = res; stack_pointer += -1; @@ -745,12 +804,18 @@ left = stack_pointer[-2]; PyObject *left_o = PyStackRef_AsPyObjectBorrow(left); PyObject *right_o = PyStackRef_AsPyObjectBorrow(right); + assert(PyFloat_CheckExact(left_o)); + assert(PyFloat_CheckExact(right_o)); STAT_INC(BINARY_OP, hit); double dres = ((PyFloatObject *)left_o)->ob_fval + ((PyFloatObject *)right_o)->ob_fval; PyObject *res_o = 
_PyFloat_FromDouble_ConsumeInputs(left, right, dres); - if (res_o == NULL) JUMP_TO_ERROR(); + if (res_o == NULL) { + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-2] = res; stack_pointer += -1; @@ -766,12 +831,18 @@ left = stack_pointer[-2]; PyObject *left_o = PyStackRef_AsPyObjectBorrow(left); PyObject *right_o = PyStackRef_AsPyObjectBorrow(right); + assert(PyFloat_CheckExact(left_o)); + assert(PyFloat_CheckExact(right_o)); STAT_INC(BINARY_OP, hit); double dres = ((PyFloatObject *)left_o)->ob_fval - ((PyFloatObject *)right_o)->ob_fval; PyObject *res_o = _PyFloat_FromDouble_ConsumeInputs(left, right, dres); - if (res_o == NULL) JUMP_TO_ERROR(); + if (res_o == NULL) { + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-2] = res; stack_pointer += -1; @@ -805,11 +876,17 @@ left = stack_pointer[-2]; PyObject *left_o = PyStackRef_AsPyObjectBorrow(left); PyObject *right_o = PyStackRef_AsPyObjectBorrow(right); + assert(PyUnicode_CheckExact(left_o)); + assert(PyUnicode_CheckExact(right_o)); STAT_INC(BINARY_OP, hit); PyObject *res_o = PyUnicode_Concat(left_o, right_o); - PyStackRef_CLOSE_SPECIALIZED(left, _PyUnicode_ExactDealloc); PyStackRef_CLOSE_SPECIALIZED(right, _PyUnicode_ExactDealloc); - if (res_o == NULL) JUMP_TO_ERROR(); + PyStackRef_CLOSE_SPECIALIZED(left, _PyUnicode_ExactDealloc); + if (res_o == NULL) { + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-2] = res; stack_pointer += -1; @@ -824,6 +901,8 @@ left = stack_pointer[-2]; PyObject *left_o = PyStackRef_AsPyObjectBorrow(left); PyObject *right_o = PyStackRef_AsPyObjectBorrow(right); + assert(PyUnicode_CheckExact(left_o)); + assert(PyUnicode_CheckExact(right_o)); int next_oparg; #if TIER_ONE assert(next_instr->op.code == STORE_FAST); @@ -849,12 +928,16 @@ * that the string is safe to mutate. */ assert(Py_REFCNT(left_o) >= 2); - PyStackRef_CLOSE(left); + PyStackRef_CLOSE_SPECIALIZED(left, _PyUnicode_ExactDealloc); PyObject *temp = PyStackRef_AsPyObjectSteal(*target_local); PyUnicode_Append(&temp, right_o); *target_local = PyStackRef_FromPyObjectSteal(temp); PyStackRef_CLOSE_SPECIALIZED(right, _PyUnicode_ExactDealloc); - if (PyStackRef_IsNull(*target_local)) JUMP_TO_ERROR(); + if (PyStackRef_IsNull(*target_local)) { + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } #if TIER_ONE // The STORE_FAST is already done. 
This is done here in tier one, // and during trace projection in tier two: @@ -866,6 +949,51 @@ break; } + case _GUARD_BINARY_OP_EXTEND: { + _PyStackRef right; + _PyStackRef left; + right = stack_pointer[-1]; + left = stack_pointer[-2]; + PyObject *descr = (PyObject *)CURRENT_OPERAND0(); + PyObject *left_o = PyStackRef_AsPyObjectBorrow(left); + PyObject *right_o = PyStackRef_AsPyObjectBorrow(right); + _PyBinaryOpSpecializationDescr *d = (_PyBinaryOpSpecializationDescr*)descr; + assert(INLINE_CACHE_ENTRIES_BINARY_OP == 5); + assert(d && d->guard); + _PyFrame_SetStackPointer(frame, stack_pointer); + int res = d->guard(left_o, right_o); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (!res) { + UOP_STAT_INC(uopcode, miss); + JUMP_TO_JUMP_TARGET(); + } + break; + } + + case _BINARY_OP_EXTEND: { + _PyStackRef right; + _PyStackRef left; + _PyStackRef res; + right = stack_pointer[-1]; + left = stack_pointer[-2]; + PyObject *descr = (PyObject *)CURRENT_OPERAND0(); + PyObject *left_o = PyStackRef_AsPyObjectBorrow(left); + PyObject *right_o = PyStackRef_AsPyObjectBorrow(right); + assert(INLINE_CACHE_ENTRIES_BINARY_OP == 5); + _PyBinaryOpSpecializationDescr *d = (_PyBinaryOpSpecializationDescr*)descr; + STAT_INC(BINARY_OP, hit); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyObject *res_o = d->action(left_o, right_o); + stack_pointer = _PyFrame_GetStackPointer(frame); + PyStackRef_CLOSE(left); + PyStackRef_CLOSE(right); + res = PyStackRef_FromPyObjectSteal(res_o); + stack_pointer[-2] = res; + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + break; + } + case _BINARY_SUBSCR: { _PyStackRef sub; _PyStackRef container; @@ -879,7 +1007,11 @@ stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(container); PyStackRef_CLOSE(sub); - if (res_o == NULL) JUMP_TO_ERROR(); + if (res_o == NULL) { + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-2] = res; stack_pointer += -1; @@ -910,16 +1042,22 @@ assert(WITHIN_STACK_BOUNDS()); _PyFrame_SetStackPointer(frame, stack_pointer); res_o = PyObject_GetItem(PyStackRef_AsPyObjectBorrow(container), slice); - stack_pointer = _PyFrame_GetStackPointer(frame); Py_DECREF(slice); + stack_pointer = _PyFrame_GetStackPointer(frame); stack_pointer += 2; assert(WITHIN_STACK_BOUNDS()); } + stack_pointer += -3; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(container); - if (res_o == NULL) JUMP_TO_ERROR(); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (res_o == NULL) { + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); - stack_pointer[-3] = res; - stack_pointer += -2; + stack_pointer[0] = res; + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); break; } @@ -946,14 +1084,18 @@ assert(WITHIN_STACK_BOUNDS()); _PyFrame_SetStackPointer(frame, stack_pointer); err = PyObject_SetItem(PyStackRef_AsPyObjectBorrow(container), slice, PyStackRef_AsPyObjectBorrow(v)); - stack_pointer = _PyFrame_GetStackPointer(frame); Py_DECREF(slice); + stack_pointer = _PyFrame_GetStackPointer(frame); stack_pointer += 2; assert(WITHIN_STACK_BOUNDS()); } PyStackRef_CLOSE(v); PyStackRef_CLOSE(container); - if (err) JUMP_TO_ERROR(); + if (err) { + stack_pointer += -4; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } stack_pointer += -4; assert(WITHIN_STACK_BOUNDS()); break; @@ -1001,10 +1143,14 @@ Py_INCREF(res_o); #endif PyStackRef_CLOSE_SPECIALIZED(sub_st, _PyLong_ExactDealloc); + stack_pointer += -2; + 
assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(list_st); + stack_pointer = _PyFrame_GetStackPointer(frame); res = PyStackRef_FromPyObjectSteal(res_o); - stack_pointer[-2] = res; - stack_pointer += -1; + stack_pointer[0] = res; + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); break; } @@ -1043,10 +1189,14 @@ STAT_INC(BINARY_SUBSCR, hit); PyObject *res_o = (PyObject*)&_Py_SINGLETON(strings).ascii[c]; PyStackRef_CLOSE_SPECIALIZED(sub_st, _PyLong_ExactDealloc); + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(str_st); + stack_pointer = _PyFrame_GetStackPointer(frame); res = PyStackRef_FromPyObjectSteal(res_o); - stack_pointer[-2] = res; - stack_pointer += -1; + stack_pointer[0] = res; + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); break; } @@ -1082,10 +1232,14 @@ assert(res_o != NULL); Py_INCREF(res_o); PyStackRef_CLOSE_SPECIALIZED(sub_st, _PyLong_ExactDealloc); + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(tuple_st); + stack_pointer = _PyFrame_GetStackPointer(frame); res = PyStackRef_FromPyObjectSteal(res_o); - stack_pointer[-2] = res; - stack_pointer += -1; + stack_pointer[0] = res; + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); break; } @@ -1114,7 +1268,11 @@ } PyStackRef_CLOSE(dict_st); PyStackRef_CLOSE(sub_st); - if (rc <= 0) JUMP_TO_ERROR(); + if (rc <= 0) { + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } // not found or error res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-2] = res; @@ -1184,7 +1342,11 @@ list = stack_pointer[-2 - (oparg-1)]; int err = _PyList_AppendTakeRef((PyListObject *)PyStackRef_AsPyObjectBorrow(list), PyStackRef_AsPyObjectSteal(v)); - if (err < 0) JUMP_TO_ERROR(); + if (err < 0) { + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); break; @@ -1201,7 +1363,11 @@ PyStackRef_AsPyObjectBorrow(v)); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(v); - if (err) JUMP_TO_ERROR(); + if (err) { + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); break; @@ -1221,7 +1387,11 @@ PyStackRef_CLOSE(v); PyStackRef_CLOSE(container); PyStackRef_CLOSE(sub); - if (err) JUMP_TO_ERROR(); + if (err) { + stack_pointer += -3; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } stack_pointer += -3; assert(WITHIN_STACK_BOUNDS()); break; @@ -1267,11 +1437,13 @@ PyList_SET_ITEM(list, index, PyStackRef_AsPyObjectSteal(value)); assert(old_value != NULL); UNLOCK_OBJECT(list); // unlock before decrefs! 
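/*
 * The reordering below follows a pattern that recurs throughout this
 * generated file: the operands are popped, the frame's stack pointer is
 * written back with _PyFrame_SetStackPointer() before any CLOSE/DECREF,
 * and re-read with _PyFrame_GetStackPointer() afterwards.  The rationale
 * stated in these comments (destructors may run arbitrary code, so the
 * frame must look consistent first) is an assumption, not text from the
 * patch; the shape itself is taken from the surrounding lines:
 */
// stack_pointer += -3;                              /* pop the operands     */
// assert(WITHIN_STACK_BOUNDS());
// _PyFrame_SetStackPointer(frame, stack_pointer);   /* publish the new top  */
// PyStackRef_CLOSE(list_st);                        /* may run finalizers   */
// Py_DECREF(old_value);                             /* may run finalizers   */
// stack_pointer = _PyFrame_GetStackPointer(frame);  /* re-sync after calls  */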
- Py_DECREF(old_value); PyStackRef_CLOSE_SPECIALIZED(sub_st, _PyLong_ExactDealloc); - PyStackRef_CLOSE(list_st); stack_pointer += -3; assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_CLOSE(list_st); + Py_DECREF(old_value); + stack_pointer = _PyFrame_GetStackPointer(frame); break; } @@ -1293,10 +1465,14 @@ PyStackRef_AsPyObjectSteal(sub), PyStackRef_AsPyObjectSteal(value)); stack_pointer = _PyFrame_GetStackPointer(frame); - PyStackRef_CLOSE(dict_st); - if (err) JUMP_TO_ERROR(); stack_pointer += -3; assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_CLOSE(dict_st); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (err) { + JUMP_TO_ERROR(); + } break; } @@ -1312,7 +1488,11 @@ stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(container); PyStackRef_CLOSE(sub); - if (err) JUMP_TO_ERROR(); + if (err) { + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } stack_pointer += -2; assert(WITHIN_STACK_BOUNDS()); break; @@ -1328,7 +1508,11 @@ PyObject *res_o = _PyIntrinsics_UnaryFunctions[oparg].func(tstate, PyStackRef_AsPyObjectBorrow(value)); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(value); - if (res_o == NULL) JUMP_TO_ERROR(); + if (res_o == NULL) { + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-1] = res; break; @@ -1349,7 +1533,11 @@ stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(value2_st); PyStackRef_CLOSE(value1_st); - if (res_o == NULL) JUMP_TO_ERROR(); + if (res_o == NULL) { + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-2] = res; stack_pointer += -1; @@ -1361,9 +1549,7 @@ _PyStackRef retval; _PyStackRef res; retval = stack_pointer[-1]; - #if TIER_ONE - assert(frame != &entry_frame); - #endif + assert(frame->owner != FRAME_OWNED_BY_INTERPRETER); _PyStackRef temp = retval; stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); @@ -1403,13 +1589,19 @@ type->tp_name); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(obj); - if (true) JUMP_TO_ERROR(); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); } _PyFrame_SetStackPointer(frame, stack_pointer); iter_o = (*getter)(obj_o); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(obj); - if (iter_o == NULL) JUMP_TO_ERROR(); + if (iter_o == NULL) { + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } if (Py_TYPE(iter_o)->tp_as_async == NULL || Py_TYPE(iter_o)->tp_as_async->am_anext == NULL) { stack_pointer += -1; @@ -1419,9 +1611,9 @@ "'async for' received an object from __aiter__ " "that does not implement __anext__: %.100s", Py_TYPE(iter_o)->tp_name); - stack_pointer = _PyFrame_GetStackPointer(frame); Py_DECREF(iter_o); - if (true) JUMP_TO_ERROR(); + stack_pointer = _PyFrame_GetStackPointer(frame); + JUMP_TO_ERROR(); } iter = PyStackRef_FromPyObjectSteal(iter_o); stack_pointer[-1] = iter; @@ -1454,7 +1646,11 @@ PyObject *iter_o = _PyEval_GetAwaitable(PyStackRef_AsPyObjectBorrow(iterable), oparg); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(iterable); - if (iter_o == NULL) JUMP_TO_ERROR(); + if (iter_o == NULL) { + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } iter = PyStackRef_FromPyObjectSteal(iter_o); stack_pointer[-1] = iter; break; @@ -1499,9 +1695,7 @@ // NOTE: 
It's important that YIELD_VALUE never raises an exception! // The compiler treats any exception raised here as a failed close() // or throw() call. - #if TIER_ONE - assert(frame != &entry_frame); - #endif + assert(frame->owner != FRAME_OWNED_BY_INTERPRETER); frame->instr_ptr++; PyGenObject *gen = _PyGen_GetGeneratorFromFrame(frame); assert(FRAME_SUSPENDED_YIELD_FROM == FRAME_SUSPENDED + 1); @@ -1577,13 +1771,15 @@ _PyFrame_SetStackPointer(frame, stack_pointer); int err = PyMapping_GetOptionalItem(BUILTINS(), &_Py_ID(__build_class__), &bc_o); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err < 0) JUMP_TO_ERROR(); + if (err < 0) { + JUMP_TO_ERROR(); + } if (bc_o == NULL) { _PyFrame_SetStackPointer(frame, stack_pointer); _PyErr_SetString(tstate, PyExc_NameError, "__build_class__ not found"); stack_pointer = _PyFrame_GetStackPointer(frame); - if (true) JUMP_TO_ERROR(); + JUMP_TO_ERROR(); } bc = PyStackRef_FromPyObjectSteal(bc_o); stack_pointer[0] = bc; @@ -1605,7 +1801,9 @@ "no locals found when storing %R", name); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(v); - if (true) JUMP_TO_ERROR(); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); } if (PyDict_CheckExact(ns)) { _PyFrame_SetStackPointer(frame, stack_pointer); @@ -1618,7 +1816,11 @@ stack_pointer = _PyFrame_GetStackPointer(frame); } PyStackRef_CLOSE(v); - if (err) JUMP_TO_ERROR(); + if (err) { + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); break; @@ -1662,7 +1864,11 @@ int res = _PyEval_UnpackIterableStackRef(tstate, seq, oparg, -1, top); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(seq); - if (res == 0) JUMP_TO_ERROR(); + if (res == 0) { + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } stack_pointer += -1 + oparg; assert(WITHIN_STACK_BOUNDS()); break; @@ -1766,7 +1972,11 @@ int res = _PyEval_UnpackIterableStackRef(tstate, seq, oparg & 0xFF, oparg >> 8, top); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(seq); - if (res == 0) JUMP_TO_ERROR(); + if (res == 0) { + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } stack_pointer += (oparg & 0xFF) + (oparg >> 8); assert(WITHIN_STACK_BOUNDS()); break; @@ -1785,7 +1995,11 @@ stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(v); PyStackRef_CLOSE(owner); - if (err) JUMP_TO_ERROR(); + if (err) { + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } stack_pointer += -2; assert(WITHIN_STACK_BOUNDS()); break; @@ -1800,7 +2014,11 @@ int err = PyObject_DelAttr(PyStackRef_AsPyObjectBorrow(owner), name); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(owner); - if (err) JUMP_TO_ERROR(); + if (err) { + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); break; @@ -1815,7 +2033,11 @@ int err = PyDict_SetItem(GLOBALS(), name, PyStackRef_AsPyObjectBorrow(v)); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(v); - if (err) JUMP_TO_ERROR(); + if (err) { + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); break; @@ -1849,7 +2071,7 @@ _PyErr_SetString(tstate, PyExc_SystemError, "no locals found"); stack_pointer = _PyFrame_GetStackPointer(frame); - if (true) JUMP_TO_ERROR(); + JUMP_TO_ERROR(); } locals = PyStackRef_FromPyObjectNew(l); stack_pointer[0] 
= locals; @@ -1867,7 +2089,9 @@ _PyFrame_SetStackPointer(frame, stack_pointer); PyObject *v_o = _PyEval_LoadName(tstate, frame, name); stack_pointer = _PyFrame_GetStackPointer(frame); - if (v_o == NULL) JUMP_TO_ERROR(); + if (v_o == NULL) { + JUMP_TO_ERROR(); + } v = PyStackRef_FromPyObjectSteal(v_o); stack_pointer[0] = v; stack_pointer += 1; @@ -1877,19 +2101,28 @@ case _LOAD_GLOBAL: { _PyStackRef *res; - _PyStackRef null = PyStackRef_NULL; oparg = CURRENT_OPARG(); res = &stack_pointer[0]; PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1); _PyFrame_SetStackPointer(frame, stack_pointer); _PyEval_LoadGlobalStackRef(GLOBALS(), BUILTINS(), name, res); stack_pointer = _PyFrame_GetStackPointer(frame); - if (PyStackRef_IsNull(*res)) JUMP_TO_ERROR(); - null = PyStackRef_NULL; - if (oparg & 1) stack_pointer[1] = null; - stack_pointer += 1 + (oparg & 1); - assert(WITHIN_STACK_BOUNDS()); - break; + if (PyStackRef_IsNull(*res)) { + JUMP_TO_ERROR(); + } + stack_pointer += 1; + assert(WITHIN_STACK_BOUNDS()); + break; + } + + case _PUSH_NULL_CONDITIONAL: { + _PyStackRef null = PyStackRef_NULL; + oparg = CURRENT_OPARG(); + null = PyStackRef_NULL; + if (oparg & 1) stack_pointer[0] = null; + stack_pointer += (oparg & 1); + assert(WITHIN_STACK_BOUNDS()); + break; } case _GUARD_GLOBALS_VERSION: { @@ -1953,8 +2186,6 @@ case _LOAD_GLOBAL_MODULE_FROM_KEYS: { PyDictKeysObject *globals_keys; _PyStackRef res; - _PyStackRef null = PyStackRef_NULL; - oparg = CURRENT_OPARG(); globals_keys = (PyDictKeysObject *)stack_pointer[-1].bits; uint16_t index = (uint16_t)CURRENT_OPERAND0(); PyDictUnicodeEntry *entries = DK_UNICODE_ENTRIES(globals_keys); @@ -1976,10 +2207,8 @@ res = PyStackRef_FromPyObjectSteal(res_o); #endif STAT_INC(LOAD_GLOBAL, hit); - null = PyStackRef_NULL; stack_pointer[0] = res; - if (oparg & 1) stack_pointer[1] = null; - stack_pointer += 1 + (oparg & 1); + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); break; } @@ -1987,8 +2216,6 @@ case _LOAD_GLOBAL_BUILTINS_FROM_KEYS: { PyDictKeysObject *builtins_keys; _PyStackRef res; - _PyStackRef null = PyStackRef_NULL; - oparg = CURRENT_OPARG(); builtins_keys = (PyDictKeysObject *)stack_pointer[-1].bits; uint16_t index = (uint16_t)CURRENT_OPERAND0(); PyDictUnicodeEntry *entries = DK_UNICODE_ENTRIES(builtins_keys); @@ -2010,10 +2237,8 @@ res = PyStackRef_FromPyObjectSteal(res_o); #endif STAT_INC(LOAD_GLOBAL, hit); - null = PyStackRef_NULL; stack_pointer[0] = res; - if (oparg & 1) stack_pointer[1] = null; - stack_pointer += 1 + (oparg & 1); + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); break; } @@ -2028,7 +2253,7 @@ PyTuple_GetItem(_PyFrame_GetCode(frame)->co_localsplusnames, oparg) ); stack_pointer = _PyFrame_GetStackPointer(frame); - if (1) JUMP_TO_ERROR(); + JUMP_TO_ERROR(); } SETLOCAL(oparg, PyStackRef_NULL); break; @@ -2059,7 +2284,9 @@ stack_pointer = _PyFrame_GetStackPointer(frame); JUMP_TO_ERROR(); } + _PyFrame_SetStackPointer(frame, stack_pointer); Py_DECREF(oldobj); + stack_pointer = _PyFrame_GetStackPointer(frame); break; } @@ -2090,9 +2317,15 @@ JUMP_TO_ERROR(); } } + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(class_dict_st); + stack_pointer = _PyFrame_GetStackPointer(frame); value = PyStackRef_FromPyObjectSteal(value_o); - stack_pointer[-1] = value; + stack_pointer[0] = value; + stack_pointer += 1; + assert(WITHIN_STACK_BOUNDS()); break; } @@ -2105,7 +2338,7 @@ _PyFrame_SetStackPointer(frame, stack_pointer); _PyEval_FormatExcUnbound(tstate, _PyFrame_GetCode(frame), 
oparg); stack_pointer = _PyFrame_GetStackPointer(frame); - if (true) JUMP_TO_ERROR(); + JUMP_TO_ERROR(); } value = PyStackRef_FromPyObjectSteal(value_o); stack_pointer[0] = value; @@ -2153,14 +2386,20 @@ for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(pieces[_i]); } - if (true) JUMP_TO_ERROR(); + stack_pointer += -oparg; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); } PyObject *str_o = _PyUnicode_JoinArray(&_Py_STR(empty), pieces_o, oparg); STACKREFS_TO_PYOBJECTS_CLEANUP(pieces_o); for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(pieces[_i]); } - if (str_o == NULL) JUMP_TO_ERROR(); + if (str_o == NULL) { + stack_pointer += -oparg; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } str = PyStackRef_FromPyObjectSteal(str_o); stack_pointer[-oparg] = str; stack_pointer += 1 - oparg; @@ -2173,8 +2412,10 @@ _PyStackRef tup; oparg = CURRENT_OPARG(); values = &stack_pointer[-oparg]; - PyObject *tup_o = _PyTuple_FromStackRefSteal(values, oparg); - if (tup_o == NULL) JUMP_TO_ERROR(); + PyObject *tup_o = _PyTuple_FromStackRefStealOnSuccess(values, oparg); + if (tup_o == NULL) { + JUMP_TO_ERROR(); + } tup = PyStackRef_FromPyObjectSteal(tup_o); stack_pointer[-oparg] = tup; stack_pointer += 1 - oparg; @@ -2187,8 +2428,10 @@ _PyStackRef list; oparg = CURRENT_OPARG(); values = &stack_pointer[-oparg]; - PyObject *list_o = _PyList_FromStackRefSteal(values, oparg); - if (list_o == NULL) JUMP_TO_ERROR(); + PyObject *list_o = _PyList_FromStackRefStealOnSuccess(values, oparg); + if (list_o == NULL) { + JUMP_TO_ERROR(); + } list = PyStackRef_FromPyObjectSteal(list_o); stack_pointer[-oparg] = list; stack_pointer += 1 - oparg; @@ -2222,7 +2465,9 @@ stack_pointer = _PyFrame_GetStackPointer(frame); } PyStackRef_CLOSE(iterable_st); - if (true) JUMP_TO_ERROR(); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); } assert(Py_IsNone(none_val)); PyStackRef_CLOSE(iterable_st); @@ -2242,7 +2487,11 @@ PyStackRef_AsPyObjectBorrow(iterable)); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(iterable); - if (err < 0) JUMP_TO_ERROR(); + if (err < 0) { + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); break; @@ -2260,7 +2509,9 @@ for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(values[_i]); } - if (true) JUMP_TO_ERROR(); + stack_pointer += -oparg; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); } int err = 0; for (int i = 0; i < oparg; i++) { @@ -2269,11 +2520,17 @@ err = PySet_Add(set_o, PyStackRef_AsPyObjectBorrow(values[i])); stack_pointer = _PyFrame_GetStackPointer(frame); } - PyStackRef_CLOSE(values[i]); + } + for (int _i = oparg; --_i >= 0;) { + PyStackRef_CLOSE(values[_i]); } if (err != 0) { + stack_pointer += -oparg; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); Py_DECREF(set_o); - if (true) JUMP_TO_ERROR(); + stack_pointer = _PyFrame_GetStackPointer(frame); + JUMP_TO_ERROR(); } set = PyStackRef_FromPyObjectSteal(set_o); stack_pointer[-oparg] = set; @@ -2292,7 +2549,9 @@ for (int _i = oparg*2; --_i >= 0;) { PyStackRef_CLOSE(values[_i]); } - if (true) JUMP_TO_ERROR(); + stack_pointer += -oparg*2; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); } _PyFrame_SetStackPointer(frame, stack_pointer); PyObject *map_o = _PyDict_FromItems( @@ -2304,7 +2563,11 @@ for (int _i = oparg*2; --_i >= 0;) { PyStackRef_CLOSE(values[_i]); } - if (map_o == NULL) JUMP_TO_ERROR(); + if (map_o == NULL) { + stack_pointer += -oparg*2; + 
assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } map = PyStackRef_FromPyObjectSteal(map_o); stack_pointer[-oparg*2] = map; stack_pointer += 1 - oparg*2; @@ -2319,27 +2582,35 @@ _PyErr_Format(tstate, PyExc_SystemError, "no locals found when setting up annotations"); stack_pointer = _PyFrame_GetStackPointer(frame); - if (true) JUMP_TO_ERROR(); + JUMP_TO_ERROR(); } /* check if __annotations__ in locals()... */ _PyFrame_SetStackPointer(frame, stack_pointer); int err = PyMapping_GetOptionalItem(LOCALS(), &_Py_ID(__annotations__), &ann_dict); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err < 0) JUMP_TO_ERROR(); + if (err < 0) { + JUMP_TO_ERROR(); + } if (ann_dict == NULL) { _PyFrame_SetStackPointer(frame, stack_pointer); ann_dict = PyDict_New(); stack_pointer = _PyFrame_GetStackPointer(frame); - if (ann_dict == NULL) JUMP_TO_ERROR(); + if (ann_dict == NULL) { + JUMP_TO_ERROR(); + } _PyFrame_SetStackPointer(frame, stack_pointer); err = PyObject_SetItem(LOCALS(), &_Py_ID(__annotations__), ann_dict); - stack_pointer = _PyFrame_GetStackPointer(frame); Py_DECREF(ann_dict); - if (err) JUMP_TO_ERROR(); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (err) { + JUMP_TO_ERROR(); + } } else { + _PyFrame_SetStackPointer(frame, stack_pointer); Py_DECREF(ann_dict); + stack_pointer = _PyFrame_GetStackPointer(frame); } break; } @@ -2367,7 +2638,9 @@ stack_pointer = _PyFrame_GetStackPointer(frame); } PyStackRef_CLOSE(update); - if (true) JUMP_TO_ERROR(); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); } PyStackRef_CLOSE(update); stack_pointer += -1; @@ -2394,7 +2667,9 @@ _PyEval_FormatKwargsError(tstate, callable_o, update_o); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(update); - if (true) JUMP_TO_ERROR(); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); } PyStackRef_CLOSE(update); stack_pointer += -1; @@ -2421,7 +2696,11 @@ PyStackRef_AsPyObjectSteal(value) ); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err != 0) JUMP_TO_ERROR(); + if (err != 0) { + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } stack_pointer += -2; assert(WITHIN_STACK_BOUNDS()); break; @@ -2458,7 +2737,11 @@ PyStackRef_CLOSE(global_super_st); PyStackRef_CLOSE(class_st); PyStackRef_CLOSE(self_st); - if (attr == NULL) JUMP_TO_ERROR(); + if (attr == NULL) { + stack_pointer += -3; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } attr_st = PyStackRef_FromPyObjectSteal(attr); stack_pointer[-3] = attr_st; stack_pointer += -2; @@ -2496,18 +2779,23 @@ PyObject *attr_o = _PySuper_Lookup(cls, self, name, Py_TYPE(self)->tp_getattro == PyObject_GenericGetAttr ? 
&method_found : NULL); stack_pointer = _PyFrame_GetStackPointer(frame); - PyStackRef_CLOSE(global_super_st); - PyStackRef_CLOSE(class_st); if (attr_o == NULL) { - PyStackRef_CLOSE(self_st); - if (true) JUMP_TO_ERROR(); + JUMP_TO_ERROR(); } if (method_found) { self_or_null = self_st; // transfer ownership } else { + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(self_st); + stack_pointer = _PyFrame_GetStackPointer(frame); self_or_null = PyStackRef_NULL; + stack_pointer += 1; + assert(WITHIN_STACK_BOUNDS()); } + PyStackRef_CLOSE(global_super_st); + PyStackRef_CLOSE(class_st); attr = PyStackRef_FromPyObjectSteal(attr_o); stack_pointer[-3] = attr; stack_pointer[-2] = self_or_null; @@ -2519,9 +2807,10 @@ case _LOAD_ATTR: { _PyStackRef owner; _PyStackRef attr; - _PyStackRef self_or_null = PyStackRef_NULL; + _PyStackRef *self_or_null; oparg = CURRENT_OPARG(); owner = stack_pointer[-1]; + self_or_null = &stack_pointer[0]; PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 1); PyObject *attr_o; if (oparg & 1) { @@ -2536,7 +2825,7 @@ meth | self | arg1 | ... | argN */ assert(attr_o != NULL); // No errors on this branch - self_or_null = owner; // Transfer ownership + self_or_null[0] = owner; // Transfer ownership } else { /* meth is not an unbound method (but a regular attr, or @@ -2546,8 +2835,12 @@ meth | NULL | arg1 | ... | argN */ PyStackRef_CLOSE(owner); - if (attr_o == NULL) JUMP_TO_ERROR(); - self_or_null = PyStackRef_NULL; + if (attr_o == NULL) { + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } + self_or_null[0] = PyStackRef_NULL; } } else { @@ -2556,14 +2849,15 @@ attr_o = PyObject_GetAttr(PyStackRef_AsPyObjectBorrow(owner), name); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(owner); - if (attr_o == NULL) JUMP_TO_ERROR(); - /* We need to define self_or_null on all paths */ - self_or_null = PyStackRef_NULL; + if (attr_o == NULL) { + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } } attr = PyStackRef_FromPyObjectSteal(attr_o); stack_pointer[-1] = attr; - if (oparg & 1) stack_pointer[0] = self_or_null; - stack_pointer += (oparg & 1); + stack_pointer += (oparg&1); assert(WITHIN_STACK_BOUNDS()); break; } @@ -2608,64 +2902,43 @@ PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner); assert(Py_TYPE(owner_o)->tp_dictoffset < 0); assert(Py_TYPE(owner_o)->tp_flags & Py_TPFLAGS_INLINE_VALUES); - if (!_PyObject_InlineValues(owner_o)->valid) { + if (!FT_ATOMIC_LOAD_UINT8(_PyObject_InlineValues(owner_o)->valid)) { UOP_STAT_INC(uopcode, miss); JUMP_TO_JUMP_TARGET(); } break; } - case _LOAD_ATTR_INSTANCE_VALUE_0: { + case _LOAD_ATTR_INSTANCE_VALUE: { _PyStackRef owner; _PyStackRef attr; - _PyStackRef null = PyStackRef_NULL; - (void)null; owner = stack_pointer[-1]; uint16_t offset = (uint16_t)CURRENT_OPERAND0(); PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner); PyObject **value_ptr = (PyObject**)(((char *)owner_o) + offset); - PyObject *attr_o = *value_ptr; + PyObject *attr_o = FT_ATOMIC_LOAD_PTR_ACQUIRE(*value_ptr); if (attr_o == NULL) { UOP_STAT_INC(uopcode, miss); JUMP_TO_JUMP_TARGET(); } - STAT_INC(LOAD_ATTR, hit); - Py_INCREF(attr_o); - null = PyStackRef_NULL; - attr = PyStackRef_FromPyObjectSteal(attr_o); - PyStackRef_CLOSE(owner); - stack_pointer[-1] = attr; - break; - } - - case _LOAD_ATTR_INSTANCE_VALUE_1: { - _PyStackRef owner; - _PyStackRef attr; - _PyStackRef null = PyStackRef_NULL; - (void)null; - owner = stack_pointer[-1]; - uint16_t 
offset = (uint16_t)CURRENT_OPERAND0(); - PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner); - PyObject **value_ptr = (PyObject**)(((char *)owner_o) + offset); - PyObject *attr_o = *value_ptr; - if (attr_o == NULL) { - UOP_STAT_INC(uopcode, miss); - JUMP_TO_JUMP_TARGET(); + #ifdef Py_GIL_DISABLED + if (!_Py_TryIncrefCompareStackRef(value_ptr, attr_o, &attr)) { + if (true) { + UOP_STAT_INC(uopcode, miss); + JUMP_TO_JUMP_TARGET(); + } } + #else + attr = PyStackRef_FromPyObjectNew(attr_o); + #endif STAT_INC(LOAD_ATTR, hit); - Py_INCREF(attr_o); - null = PyStackRef_NULL; - attr = PyStackRef_FromPyObjectSteal(attr_o); - PyStackRef_CLOSE(owner); stack_pointer[-1] = attr; - stack_pointer[0] = null; - stack_pointer += 1; - assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_CLOSE(owner); + stack_pointer = _PyFrame_GetStackPointer(frame); break; } - /* _LOAD_ATTR_INSTANCE_VALUE is split on (oparg & 1) */ - case _CHECK_ATTR_MODULE_PUSH_KEYS: { _PyStackRef owner; PyDictKeysObject *mod_keys; @@ -2694,8 +2967,6 @@ PyDictKeysObject *mod_keys; _PyStackRef owner; _PyStackRef attr; - _PyStackRef null = PyStackRef_NULL; - oparg = CURRENT_OPARG(); mod_keys = (PyDictKeysObject *)stack_pointer[-1].bits; owner = stack_pointer[-2]; uint16_t index = (uint16_t)CURRENT_OPERAND0(); @@ -2723,119 +2994,125 @@ attr = PyStackRef_FromPyObjectSteal(attr_o); #endif STAT_INC(LOAD_ATTR, hit); - null = PyStackRef_NULL; - PyStackRef_CLOSE(owner); stack_pointer[-1] = attr; - if (oparg & 1) stack_pointer[0] = null; - stack_pointer += (oparg & 1); - assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_CLOSE(owner); + stack_pointer = _PyFrame_GetStackPointer(frame); break; } case _CHECK_ATTR_WITH_HINT: { _PyStackRef owner; + PyDictObject *dict; owner = stack_pointer[-1]; PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner); assert(Py_TYPE(owner_o)->tp_flags & Py_TPFLAGS_MANAGED_DICT); - PyDictObject *dict = _PyObject_GetManagedDict(owner_o); - if (dict == NULL) { + PyDictObject *dict_o = _PyObject_GetManagedDict(owner_o); + if (dict_o == NULL) { UOP_STAT_INC(uopcode, miss); JUMP_TO_JUMP_TARGET(); } - assert(PyDict_CheckExact((PyObject *)dict)); + assert(PyDict_CheckExact((PyObject *)dict_o)); + dict = dict_o; + stack_pointer[0].bits = (uintptr_t)dict; + stack_pointer += 1; + assert(WITHIN_STACK_BOUNDS()); break; } case _LOAD_ATTR_WITH_HINT: { + PyDictObject *dict; _PyStackRef owner; _PyStackRef attr; - _PyStackRef null = PyStackRef_NULL; oparg = CURRENT_OPARG(); - owner = stack_pointer[-1]; + dict = (PyDictObject *)stack_pointer[-1].bits; + owner = stack_pointer[-2]; uint16_t hint = (uint16_t)CURRENT_OPERAND0(); - PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner); PyObject *attr_o; - PyDictObject *dict = _PyObject_GetManagedDict(owner_o); + if (!LOCK_OBJECT(dict)) { + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + if (true) { + UOP_STAT_INC(uopcode, miss); + JUMP_TO_JUMP_TARGET(); + } + } if (hint >= (size_t)dict->ma_keys->dk_nentries) { - UOP_STAT_INC(uopcode, miss); - JUMP_TO_JUMP_TARGET(); + UNLOCK_OBJECT(dict); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + if (true) { + UOP_STAT_INC(uopcode, miss); + JUMP_TO_JUMP_TARGET(); + } } PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1); - if (!DK_IS_UNICODE(dict->ma_keys)) { - UOP_STAT_INC(uopcode, miss); - JUMP_TO_JUMP_TARGET(); + if (dict->ma_keys->dk_kind != DICT_KEYS_UNICODE) { + UNLOCK_OBJECT(dict); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + if (true) 
{ + UOP_STAT_INC(uopcode, miss); + JUMP_TO_JUMP_TARGET(); + } } PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + hint; if (ep->me_key != name) { - UOP_STAT_INC(uopcode, miss); - JUMP_TO_JUMP_TARGET(); + UNLOCK_OBJECT(dict); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + if (true) { + UOP_STAT_INC(uopcode, miss); + JUMP_TO_JUMP_TARGET(); + } } attr_o = ep->me_value; if (attr_o == NULL) { - UOP_STAT_INC(uopcode, miss); - JUMP_TO_JUMP_TARGET(); + UNLOCK_OBJECT(dict); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + if (true) { + UOP_STAT_INC(uopcode, miss); + JUMP_TO_JUMP_TARGET(); + } } STAT_INC(LOAD_ATTR, hit); - Py_INCREF(attr_o); - attr = PyStackRef_FromPyObjectSteal(attr_o); - null = PyStackRef_NULL; + attr = PyStackRef_FromPyObjectNew(attr_o); + UNLOCK_OBJECT(dict); PyStackRef_CLOSE(owner); - stack_pointer[-1] = attr; - if (oparg & 1) stack_pointer[0] = null; - stack_pointer += (oparg & 1); + stack_pointer[-2] = attr; + stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); break; } - case _LOAD_ATTR_SLOT_0: { + case _LOAD_ATTR_SLOT: { _PyStackRef owner; _PyStackRef attr; - _PyStackRef null = PyStackRef_NULL; - (void)null; owner = stack_pointer[-1]; uint16_t index = (uint16_t)CURRENT_OPERAND0(); PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner); - char *addr = (char *)owner_o + index; - PyObject *attr_o = *(PyObject **)addr; + PyObject **addr = (PyObject **)((char *)owner_o + index); + PyObject *attr_o = FT_ATOMIC_LOAD_PTR(*addr); if (attr_o == NULL) { UOP_STAT_INC(uopcode, miss); JUMP_TO_JUMP_TARGET(); } - STAT_INC(LOAD_ATTR, hit); - null = PyStackRef_NULL; - attr = PyStackRef_FromPyObjectNew(attr_o); - PyStackRef_CLOSE(owner); - stack_pointer[-1] = attr; - break; - } - - case _LOAD_ATTR_SLOT_1: { - _PyStackRef owner; - _PyStackRef attr; - _PyStackRef null = PyStackRef_NULL; - (void)null; - owner = stack_pointer[-1]; - uint16_t index = (uint16_t)CURRENT_OPERAND0(); - PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner); - char *addr = (char *)owner_o + index; - PyObject *attr_o = *(PyObject **)addr; - if (attr_o == NULL) { + #ifdef Py_GIL_DISABLED + int increfed = _Py_TryIncrefCompareStackRef(addr, attr_o, &attr); + if (!increfed) { UOP_STAT_INC(uopcode, miss); JUMP_TO_JUMP_TARGET(); } - STAT_INC(LOAD_ATTR, hit); - null = PyStackRef_NULL; + #else attr = PyStackRef_FromPyObjectNew(attr_o); + #endif + STAT_INC(LOAD_ATTR, hit); PyStackRef_CLOSE(owner); stack_pointer[-1] = attr; - stack_pointer[0] = null; - stack_pointer += 1; - assert(WITHIN_STACK_BOUNDS()); break; } - /* _LOAD_ATTR_SLOT is split on (oparg & 1) */ - case _CHECK_ATTR_CLASS: { _PyStackRef owner; owner = stack_pointer[-1]; @@ -2846,50 +3123,26 @@ JUMP_TO_JUMP_TARGET(); } assert(type_version != 0); - if (((PyTypeObject *)owner_o)->tp_version_tag != type_version) { + if (FT_ATOMIC_LOAD_UINT_RELAXED(((PyTypeObject *)owner_o)->tp_version_tag) != type_version) { UOP_STAT_INC(uopcode, miss); JUMP_TO_JUMP_TARGET(); } break; } - case _LOAD_ATTR_CLASS_0: { - _PyStackRef owner; - _PyStackRef attr; - _PyStackRef null = PyStackRef_NULL; - (void)null; - owner = stack_pointer[-1]; - PyObject *descr = (PyObject *)CURRENT_OPERAND0(); - STAT_INC(LOAD_ATTR, hit); - assert(descr != NULL); - attr = PyStackRef_FromPyObjectNew(descr); - null = PyStackRef_NULL; - PyStackRef_CLOSE(owner); - stack_pointer[-1] = attr; - break; - } - - case _LOAD_ATTR_CLASS_1: { + case _LOAD_ATTR_CLASS: { _PyStackRef owner; _PyStackRef attr; - _PyStackRef null = PyStackRef_NULL; - (void)null; owner = stack_pointer[-1]; PyObject 
*descr = (PyObject *)CURRENT_OPERAND0(); STAT_INC(LOAD_ATTR, hit); assert(descr != NULL); attr = PyStackRef_FromPyObjectNew(descr); - null = PyStackRef_NULL; PyStackRef_CLOSE(owner); stack_pointer[-1] = attr; - stack_pointer[0] = null; - stack_pointer += 1; - assert(WITHIN_STACK_BOUNDS()); break; } - /* _LOAD_ATTR_CLASS is split on (oparg & 1) */ - case _LOAD_ATTR_PROPERTY_FRAME: { _PyStackRef owner; _PyInterpreterFrame *new_frame; @@ -2960,10 +3213,12 @@ _PyDictValues_AddToInsertionOrder(values, index); } UNLOCK_OBJECT(owner_o); - Py_XDECREF(old_value); - PyStackRef_CLOSE(owner); stack_pointer += -2; assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_CLOSE(owner); + Py_XDECREF(old_value); + stack_pointer = _PyFrame_GetStackPointer(frame); break; } @@ -3027,11 +3282,13 @@ UNLOCK_OBJECT(dict); // old_value should be DECREFed after GC track checking is done, if not, it could raise a segmentation fault, // when dict only holds the strong reference to value in ep->me_value. - Py_XDECREF(old_value); STAT_INC(STORE_ATTR, hit); - PyStackRef_CLOSE(owner); stack_pointer += -2; assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_CLOSE(owner); + Py_XDECREF(old_value); + stack_pointer = _PyFrame_GetStackPointer(frame); break; } @@ -3051,10 +3308,12 @@ PyObject *old_value = *(PyObject **)addr; FT_ATOMIC_STORE_PTR_RELEASE(*(PyObject **)addr, PyStackRef_AsPyObjectSteal(value)); UNLOCK_OBJECT(owner_o); - Py_XDECREF(old_value); - PyStackRef_CLOSE(owner); stack_pointer += -2; assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_CLOSE(owner); + Py_XDECREF(old_value); + stack_pointer = _PyFrame_GetStackPointer(frame); break; } @@ -3073,15 +3332,21 @@ stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(left); PyStackRef_CLOSE(right); - if (res_o == NULL) JUMP_TO_ERROR(); + if (res_o == NULL) { + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } if (oparg & 16) { stack_pointer += -2; assert(WITHIN_STACK_BOUNDS()); _PyFrame_SetStackPointer(frame, stack_pointer); int res_bool = PyObject_IsTrue(res_o); - stack_pointer = _PyFrame_GetStackPointer(frame); Py_DECREF(res_o); - if (res_bool < 0) JUMP_TO_ERROR(); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (res_bool < 0) { + JUMP_TO_ERROR(); + } res = res_bool ? PyStackRef_True : PyStackRef_False; } else { @@ -3209,7 +3474,11 @@ stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(left); PyStackRef_CLOSE(right); - if (res < 0) JUMP_TO_ERROR(); + if (res < 0) { + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } b = (res ^ oparg) ? PyStackRef_True : PyStackRef_False; stack_pointer[-2] = b; stack_pointer += -1; @@ -3237,7 +3506,11 @@ stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(left); PyStackRef_CLOSE(right); - if (res < 0) JUMP_TO_ERROR(); + if (res < 0) { + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } b = (res ^ oparg) ? PyStackRef_True : PyStackRef_False; stack_pointer[-2] = b; stack_pointer += -1; @@ -3264,7 +3537,11 @@ stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(left); PyStackRef_CLOSE(right); - if (res < 0) JUMP_TO_ERROR(); + if (res < 0) { + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } b = (res ^ oparg) ? 
PyStackRef_True : PyStackRef_False; stack_pointer[-2] = b; stack_pointer += -1; @@ -3287,19 +3564,29 @@ if (err < 0) { PyStackRef_CLOSE(exc_value_st); PyStackRef_CLOSE(match_type_st); - if (true) JUMP_TO_ERROR(); + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); } PyObject *match_o = NULL; PyObject *rest_o = NULL; _PyFrame_SetStackPointer(frame, stack_pointer); - int res = _PyEval_ExceptionGroupMatch(exc_value, match_type, + int res = _PyEval_ExceptionGroupMatch(frame, exc_value, match_type, &match_o, &rest_o); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(exc_value_st); PyStackRef_CLOSE(match_type_st); - if (res < 0) JUMP_TO_ERROR(); + if (res < 0) { + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } assert((match_o == NULL) == (rest_o == NULL)); - if (match_o == NULL) JUMP_TO_ERROR(); + if (match_o == NULL) { + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } if (!Py_IsNone(match_o)) { stack_pointer += -2; assert(WITHIN_STACK_BOUNDS()); @@ -3330,7 +3617,9 @@ stack_pointer = _PyFrame_GetStackPointer(frame); if (err < 0) { PyStackRef_CLOSE(right); - if (true) JUMP_TO_ERROR(); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); } _PyFrame_SetStackPointer(frame, stack_pointer); int res = PyErr_GivenExceptionMatches(left_o, right_o); @@ -3356,7 +3645,11 @@ stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(level); PyStackRef_CLOSE(fromlist); - if (res_o == NULL) JUMP_TO_ERROR(); + if (res_o == NULL) { + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-2] = res; stack_pointer += -1; @@ -3373,7 +3666,9 @@ _PyFrame_SetStackPointer(frame, stack_pointer); PyObject *res_o = _PyEval_ImportFrom(tstate, PyStackRef_AsPyObjectBorrow(from), name); stack_pointer = _PyFrame_GetStackPointer(frame); - if (res_o == NULL) JUMP_TO_ERROR(); + if (res_o == NULL) { + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[0] = res; stack_pointer += 1; @@ -3408,9 +3703,13 @@ _PyFrame_SetStackPointer(frame, stack_pointer); Py_ssize_t len_i = PyObject_Length(PyStackRef_AsPyObjectBorrow(obj)); stack_pointer = _PyFrame_GetStackPointer(frame); - if (len_i < 0) JUMP_TO_ERROR(); + if (len_i < 0) { + JUMP_TO_ERROR(); + } PyObject *len_o = PyLong_FromSsize_t(len_i); - if (len_o == NULL) JUMP_TO_ERROR(); + if (len_o == NULL) { + JUMP_TO_ERROR(); + } len = PyStackRef_FromPyObjectSteal(len_o); stack_pointer[0] = len; stack_pointer += 1; @@ -3444,7 +3743,11 @@ attrs = PyStackRef_FromPyObjectSteal(attrs_o); } else { - if (_PyErr_Occurred(tstate)) JUMP_TO_ERROR(); + if (_PyErr_Occurred(tstate)) { + stack_pointer += -3; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } // Error! attrs = PyStackRef_None; // Failure! 
} @@ -3489,7 +3792,9 @@ PyObject *values_or_none_o = _PyEval_MatchKeys(tstate, PyStackRef_AsPyObjectBorrow(subject), PyStackRef_AsPyObjectBorrow(keys)); stack_pointer = _PyFrame_GetStackPointer(frame); - if (values_or_none_o == NULL) JUMP_TO_ERROR(); + if (values_or_none_o == NULL) { + JUMP_TO_ERROR(); + } values_or_none = PyStackRef_FromPyObjectSteal(values_or_none_o); stack_pointer[0] = values_or_none; stack_pointer += 1; @@ -3506,7 +3811,11 @@ PyObject *iter_o = PyObject_GetIter(PyStackRef_AsPyObjectBorrow(iterable)); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(iterable); - if (iter_o == NULL) JUMP_TO_ERROR(); + if (iter_o == NULL) { + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } iter = PyStackRef_FromPyObjectSteal(iter_o); stack_pointer[-1] = iter; break; @@ -3726,7 +4035,9 @@ r->start = value + r->step; r->len--; PyObject *res = PyLong_FromLong(value); - if (res == NULL) JUMP_TO_ERROR(); + if (res == NULL) { + JUMP_TO_ERROR(); + } next = PyStackRef_FromPyObjectSteal(res); stack_pointer[0] = next; stack_pointer += 1; @@ -3786,7 +4097,7 @@ Py_TYPE(owner_o)->tp_name); stack_pointer = _PyFrame_GetStackPointer(frame); } - if (true) JUMP_TO_ERROR(); + JUMP_TO_ERROR(); } attr = PyStackRef_FromPyObjectSteal(attr_o); self_or_null = self_or_null_o == NULL ? @@ -3827,7 +4138,9 @@ tb = Py_None; } else { + _PyFrame_SetStackPointer(frame, stack_pointer); Py_DECREF(tb); + stack_pointer = _PyFrame_GetStackPointer(frame); } assert(PyStackRef_LongCheck(lasti)); (void)lasti; // Shut up compiler warning if asserts are off @@ -3837,7 +4150,9 @@ PyObject *res_o = PyObject_Vectorcall(exit_func_o, stack + 2 - has_self, (3 + has_self) | PY_VECTORCALL_ARGUMENTS_OFFSET, NULL); stack_pointer = _PyFrame_GetStackPointer(frame); - if (res_o == NULL) JUMP_TO_ERROR(); + if (res_o == NULL) { + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[0] = res; stack_pointer += 1; @@ -3872,7 +4187,8 @@ owner = stack_pointer[-1]; PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner); assert(Py_TYPE(owner_o)->tp_flags & Py_TPFLAGS_INLINE_VALUES); - if (!_PyObject_InlineValues(owner_o)->valid) { + PyDictValues *ivs = _PyObject_InlineValues(owner_o); + if (!FT_ATOMIC_LOAD_UINT8(ivs->valid)) { UOP_STAT_INC(uopcode, miss); JUMP_TO_JUMP_TARGET(); } @@ -3885,7 +4201,8 @@ uint32_t keys_version = (uint32_t)CURRENT_OPERAND0(); PyTypeObject *owner_cls = Py_TYPE(PyStackRef_AsPyObjectBorrow(owner)); PyHeapTypeObject *owner_heap_type = (PyHeapTypeObject *)owner_cls; - if (owner_heap_type->ht_cached_keys->dk_version != keys_version) { + PyDictKeysObject *keys = owner_heap_type->ht_cached_keys; + if (FT_ATOMIC_LOAD_UINT32_RELAXED(keys->dk_version) != keys_version) { UOP_STAT_INC(uopcode, miss); JUMP_TO_JUMP_TARGET(); } @@ -3895,7 +4212,7 @@ case _LOAD_ATTR_METHOD_WITH_VALUES: { _PyStackRef owner; _PyStackRef attr; - _PyStackRef self = PyStackRef_NULL; + _PyStackRef self; oparg = CURRENT_OPARG(); owner = stack_pointer[-1]; PyObject *descr = (PyObject *)CURRENT_OPERAND0(); @@ -3916,7 +4233,7 @@ case _LOAD_ATTR_METHOD_NO_DICT: { _PyStackRef owner; _PyStackRef attr; - _PyStackRef self = PyStackRef_NULL; + _PyStackRef self; oparg = CURRENT_OPARG(); owner = stack_pointer[-1]; PyObject *descr = (PyObject *)CURRENT_OPERAND0(); @@ -3970,7 +4287,7 @@ owner = stack_pointer[-1]; uint16_t dictoffset = (uint16_t)CURRENT_OPERAND0(); char *ptr = ((char *)PyStackRef_AsPyObjectBorrow(owner)) + MANAGED_DICT_OFFSET + dictoffset; - PyObject *dict = *(PyObject **)ptr; + PyObject 
*dict = FT_ATOMIC_LOAD_PTR_ACQUIRE(*(PyObject **)ptr); /* This object has a __dict__, just not yet created */ if (dict != NULL) { UOP_STAT_INC(uopcode, miss); @@ -3982,7 +4299,7 @@ case _LOAD_ATTR_METHOD_LAZY_DICT: { _PyStackRef owner; _PyStackRef attr; - _PyStackRef self = PyStackRef_NULL; + _PyStackRef self; oparg = CURRENT_OPARG(); owner = stack_pointer[-1]; PyObject *descr = (PyObject *)CURRENT_OPERAND0(); @@ -4018,7 +4335,9 @@ PyObject *method = ((PyMethodObject *)callable_o)->im_func; _PyStackRef temp = callable[0]; func[0] = PyStackRef_FromPyObjectNew(method); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(temp); + stack_pointer = _PyFrame_GetStackPointer(frame); } break; } @@ -4124,23 +4443,21 @@ } case _EXPAND_METHOD: { - _PyStackRef *null; + _PyStackRef *self_or_null; _PyStackRef *callable; - _PyStackRef *method; - _PyStackRef *self; oparg = CURRENT_OPARG(); - null = &stack_pointer[-1 - oparg]; + self_or_null = &stack_pointer[-1 - oparg]; callable = &stack_pointer[-2 - oparg]; - method = &stack_pointer[-2 - oparg]; - self = &stack_pointer[-1 - oparg]; PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); - assert(PyStackRef_IsNull(null[0])); + assert(PyStackRef_IsNull(self_or_null[0])); assert(Py_TYPE(callable_o) == &PyMethod_Type); - self[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_self); + self_or_null[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_self); _PyStackRef temp = callable[0]; - method[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_func); - assert(PyStackRef_FunctionCheck(method[0])); + callable[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_func); + assert(PyStackRef_FunctionCheck(callable[0])); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(temp); + stack_pointer = _PyFrame_GetStackPointer(frame); break; } @@ -4174,19 +4491,22 @@ #endif PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); int total_args = oparg; + _PyStackRef *arguments = args; if (!PyStackRef_IsNull(self_or_null[0])) { - args--; + arguments--; total_args++; } /* Callable is not a normal Python function */ - STACKREFS_TO_PYOBJECTS(args, total_args, args_o); + STACKREFS_TO_PYOBJECTS(arguments, total_args, args_o); if (CONVERSION_FAILED(args_o)) { PyStackRef_CLOSE(callable[0]); - PyStackRef_CLOSE(self_or_null[0]); + PyStackRef_XCLOSE(self_or_null[0]); for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(args[_i]); } - if (true) JUMP_TO_ERROR(); + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); } _PyFrame_SetStackPointer(frame, stack_pointer); PyObject *res_o = PyObject_Vectorcall( @@ -4197,10 +4517,15 @@ STACKREFS_TO_PYOBJECTS_CLEANUP(args_o); assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); PyStackRef_CLOSE(callable[0]); - for (int i = 0; i < total_args; i++) { - PyStackRef_CLOSE(args[i]); + PyStackRef_XCLOSE(self_or_null[0]); + for (int _i = oparg; --_i >= 0;) { + PyStackRef_CLOSE(args[_i]); + } + if (res_o == NULL) { + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); } - if (res_o == NULL) JUMP_TO_ERROR(); res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-2 - oparg] = res; stack_pointer += -1 - oparg; @@ -4226,21 +4551,20 @@ } case _INIT_CALL_BOUND_METHOD_EXACT_ARGS: { - _PyStackRef *null; + _PyStackRef *self_or_null; _PyStackRef *callable; - _PyStackRef *func; - _PyStackRef *self; oparg = CURRENT_OPARG(); - null = &stack_pointer[-1 - oparg]; + self_or_null = 
&stack_pointer[-1 - oparg]; callable = &stack_pointer[-2 - oparg]; - func = &stack_pointer[-2 - oparg]; - self = &stack_pointer[-1 - oparg]; + assert(PyStackRef_IsNull(self_or_null[0])); PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); STAT_INC(CALL, hit); - self[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_self); + self_or_null[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_self); _PyStackRef temp = callable[0]; - func[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_func); + callable[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_func); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(temp); + stack_pointer = _PyFrame_GetStackPointer(frame); break; } @@ -4472,10 +4796,12 @@ } STAT_INC(CALL, hit); res = PyStackRef_FromPyObjectSteal(Py_NewRef(Py_TYPE(arg_o))); - PyStackRef_CLOSE(arg); stack_pointer[-3] = res; stack_pointer += -2; assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_CLOSE(arg); + stack_pointer = _PyFrame_GetStackPointer(frame); break; } @@ -4503,11 +4829,17 @@ _PyFrame_SetStackPointer(frame, stack_pointer); PyObject *res_o = PyObject_Str(arg_o); stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -3; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(arg); - if (res_o == NULL) JUMP_TO_ERROR(); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (res_o == NULL) { + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); - stack_pointer[-3] = res; - stack_pointer += -2; + stack_pointer[0] = res; + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); break; } @@ -4536,11 +4868,17 @@ _PyFrame_SetStackPointer(frame, stack_pointer); PyObject *res_o = PySequence_Tuple(arg_o); stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -3; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(arg); - if (res_o == NULL) JUMP_TO_ERROR(); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (res_o == NULL) { + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); - stack_pointer[-3] = res; - stack_pointer += -2; + stack_pointer[0] = res; + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); break; } @@ -4592,7 +4930,9 @@ self[0] = PyStackRef_FromPyObjectSteal(self_o); _PyStackRef temp = callable[0]; init[0] = PyStackRef_FromPyObjectNew(init_func); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(temp); + stack_pointer = _PyFrame_GetStackPointer(frame); break; } @@ -4662,40 +5002,47 @@ self_or_null = &stack_pointer[-1 - oparg]; callable = &stack_pointer[-2 - oparg]; PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); - int total_args = oparg; - if (!PyStackRef_IsNull(self_or_null[0])) { - args--; - total_args++; - } if (!PyType_Check(callable_o)) { UOP_STAT_INC(uopcode, miss); JUMP_TO_JUMP_TARGET(); } PyTypeObject *tp = (PyTypeObject *)callable_o; + int total_args = oparg; + _PyStackRef *arguments = args; + if (!PyStackRef_IsNull(self_or_null[0])) { + arguments--; + total_args++; + } if (tp->tp_vectorcall == NULL) { UOP_STAT_INC(uopcode, miss); JUMP_TO_JUMP_TARGET(); } STAT_INC(CALL, hit); - STACKREFS_TO_PYOBJECTS(args, total_args, args_o); + STACKREFS_TO_PYOBJECTS(arguments, total_args, args_o); if (CONVERSION_FAILED(args_o)) { PyStackRef_CLOSE(callable[0]); - PyStackRef_CLOSE(self_or_null[0]); + PyStackRef_XCLOSE(self_or_null[0]); for (int _i = oparg; --_i >= 0;) 
{ PyStackRef_CLOSE(args[_i]); } - if (true) JUMP_TO_ERROR(); + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); } _PyFrame_SetStackPointer(frame, stack_pointer); PyObject *res_o = tp->tp_vectorcall((PyObject *)tp, args_o, total_args, NULL); stack_pointer = _PyFrame_GetStackPointer(frame); STACKREFS_TO_PYOBJECTS_CLEANUP(args_o); - /* Free the arguments. */ - for (int i = 0; i < total_args; i++) { - PyStackRef_CLOSE(args[i]); - } PyStackRef_CLOSE(callable[0]); - if (res_o == NULL) JUMP_TO_ERROR(); + PyStackRef_XCLOSE(self_or_null[0]); + for (int _i = oparg; --_i >= 0;) { + PyStackRef_CLOSE(args[_i]); + } + if (res_o == NULL) { + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-2 - oparg] = res; stack_pointer += -1 - oparg; @@ -4745,12 +5092,20 @@ stack_pointer = _PyFrame_GetStackPointer(frame); _Py_LeaveRecursiveCallTstate(tstate); assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(arg); + stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(callable[0]); - if (res_o == NULL) JUMP_TO_ERROR(); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (res_o == NULL) { + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); - stack_pointer[-2 - oparg] = res; - stack_pointer += -1 - oparg; + stack_pointer[0] = res; + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); break; } @@ -4767,8 +5122,9 @@ /* Builtin METH_FASTCALL functions, without keywords */ PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); int total_args = oparg; + _PyStackRef *arguments = args; if (!PyStackRef_IsNull(self_or_null[0])) { - args--; + arguments--; total_args++; } if (!PyCFunction_CheckExact(callable_o)) { @@ -4782,14 +5138,16 @@ STAT_INC(CALL, hit); PyCFunction cfunc = PyCFunction_GET_FUNCTION(callable_o); /* res = func(self, args, nargs) */ - STACKREFS_TO_PYOBJECTS(args, total_args, args_o); + STACKREFS_TO_PYOBJECTS(arguments, total_args, args_o); if (CONVERSION_FAILED(args_o)) { PyStackRef_CLOSE(callable[0]); - PyStackRef_CLOSE(self_or_null[0]); + PyStackRef_XCLOSE(self_or_null[0]); for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(args[_i]); } - if (true) JUMP_TO_ERROR(); + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); } _PyFrame_SetStackPointer(frame, stack_pointer); PyObject *res_o = ((PyCFunctionFast)(void(*)(void))cfunc)( @@ -4799,12 +5157,16 @@ stack_pointer = _PyFrame_GetStackPointer(frame); STACKREFS_TO_PYOBJECTS_CLEANUP(args_o); assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); - /* Free the arguments. 
*/ - for (int i = 0; i < total_args; i++) { - PyStackRef_CLOSE(args[i]); - } PyStackRef_CLOSE(callable[0]); - if (res_o == NULL) JUMP_TO_ERROR(); + PyStackRef_XCLOSE(self_or_null[0]); + for (int _i = oparg; --_i >= 0;) { + PyStackRef_CLOSE(args[_i]); + } + if (res_o == NULL) { + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-2 - oparg] = res; stack_pointer += -1 - oparg; @@ -4824,8 +5186,9 @@ /* Builtin METH_FASTCALL | METH_KEYWORDS functions */ PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); int total_args = oparg; + _PyStackRef *arguments = args; if (!PyStackRef_IsNull(self_or_null[0])) { - args--; + arguments--; total_args++; } if (!PyCFunction_CheckExact(callable_o)) { @@ -4837,32 +5200,38 @@ JUMP_TO_JUMP_TARGET(); } STAT_INC(CALL, hit); - /* res = func(self, args, nargs, kwnames) */ + /* res = func(self, arguments, nargs, kwnames) */ _PyFrame_SetStackPointer(frame, stack_pointer); PyCFunctionFastWithKeywords cfunc = (PyCFunctionFastWithKeywords)(void(*)(void)) PyCFunction_GET_FUNCTION(callable_o); stack_pointer = _PyFrame_GetStackPointer(frame); - STACKREFS_TO_PYOBJECTS(args, total_args, args_o); + STACKREFS_TO_PYOBJECTS(arguments, total_args, args_o); if (CONVERSION_FAILED(args_o)) { PyStackRef_CLOSE(callable[0]); - PyStackRef_CLOSE(self_or_null[0]); + PyStackRef_XCLOSE(self_or_null[0]); for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(args[_i]); } - if (true) JUMP_TO_ERROR(); + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); } _PyFrame_SetStackPointer(frame, stack_pointer); PyObject *res_o = cfunc(PyCFunction_GET_SELF(callable_o), args_o, total_args, NULL); stack_pointer = _PyFrame_GetStackPointer(frame); STACKREFS_TO_PYOBJECTS_CLEANUP(args_o); assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); - /* Free the arguments. 
*/ - for (int i = 0; i < total_args; i++) { - PyStackRef_CLOSE(args[i]); - } PyStackRef_CLOSE(callable[0]); - if (res_o == NULL) JUMP_TO_ERROR(); + PyStackRef_XCLOSE(self_or_null[0]); + for (int _i = oparg; --_i >= 0;) { + PyStackRef_CLOSE(args[_i]); + } + if (res_o == NULL) { + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-2 - oparg] = res; stack_pointer += -1 - oparg; @@ -4909,11 +5278,17 @@ if (res_o == NULL) { GOTO_ERROR(error); } - PyStackRef_CLOSE(callable[0]); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(arg_stackref); + stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_CLOSE(callable[0]); + stack_pointer = _PyFrame_GetStackPointer(frame); res = PyStackRef_FromPyObjectSteal(res_o); - stack_pointer[-2 - oparg] = res; - stack_pointer += -1 - oparg; + stack_pointer[0] = res; + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); break; } @@ -4930,8 +5305,9 @@ /* isinstance(o, o2) */ PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); int total_args = oparg; + _PyStackRef *arguments = args; if (!PyStackRef_IsNull(self_or_null[0])) { - args--; + arguments--; total_args++; } if (total_args != 2) { @@ -4944,8 +5320,8 @@ JUMP_TO_JUMP_TARGET(); } STAT_INC(CALL, hit); - _PyStackRef cls_stackref = args[1]; - _PyStackRef inst_stackref = args[0]; + _PyStackRef cls_stackref = arguments[1]; + _PyStackRef inst_stackref = arguments[0]; _PyFrame_SetStackPointer(frame, stack_pointer); int retval = PyObject_IsInstance(PyStackRef_AsPyObjectBorrow(inst_stackref), PyStackRef_AsPyObjectBorrow(cls_stackref)); stack_pointer = _PyFrame_GetStackPointer(frame); @@ -4954,9 +5330,11 @@ } res = retval ? PyStackRef_True : PyStackRef_False; assert((!PyStackRef_IsNull(res)) ^ (_PyErr_Occurred(tstate) != NULL)); - PyStackRef_CLOSE(inst_stackref); - PyStackRef_CLOSE(cls_stackref); PyStackRef_CLOSE(callable[0]); + PyStackRef_XCLOSE(self_or_null[0]); + for (int _i = oparg; --_i >= 0;) { + PyStackRef_CLOSE(args[_i]); + } stack_pointer[-2 - oparg] = res; stack_pointer += -1 - oparg; assert(WITHIN_STACK_BOUNDS()); @@ -4991,17 +5369,25 @@ STAT_INC(CALL, hit); int err = _PyList_AppendTakeRef((PyListObject *)self_o, PyStackRef_AsPyObjectSteal(arg)); UNLOCK_OBJECT(self_o); + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(self); + stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(callable); - if (err) JUMP_TO_ERROR(); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (err) { + JUMP_TO_ERROR(); + } #if TIER_ONE // Skip the following POP_TOP. 
This is done here in tier one, and // during trace projection in tier two: assert(next_instr->op.code == POP_TOP); SKIP_OVER(1); #endif - stack_pointer += -3; - assert(WITHIN_STACK_BOUNDS()); break; } @@ -5016,8 +5402,9 @@ callable = &stack_pointer[-2 - oparg]; PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); int total_args = oparg; + _PyStackRef *arguments = args; if (!PyStackRef_IsNull(self_or_null[0])) { - args--; + arguments--; total_args++; } PyMethodDescrObject *method = (PyMethodDescrObject *)callable_o; @@ -5039,8 +5426,8 @@ UOP_STAT_INC(uopcode, miss); JUMP_TO_JUMP_TARGET(); } - _PyStackRef arg_stackref = args[1]; - _PyStackRef self_stackref = args[0]; + _PyStackRef arg_stackref = arguments[1]; + _PyStackRef self_stackref = arguments[0]; if (!Py_IS_TYPE(PyStackRef_AsPyObjectBorrow(self_stackref), method->d_common.d_type)) { UOP_STAT_INC(uopcode, miss); @@ -5056,10 +5443,16 @@ stack_pointer = _PyFrame_GetStackPointer(frame); _Py_LeaveRecursiveCallTstate(tstate); assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); - PyStackRef_CLOSE(self_stackref); - PyStackRef_CLOSE(arg_stackref); PyStackRef_CLOSE(callable[0]); - if (res_o == NULL) JUMP_TO_ERROR(); + PyStackRef_XCLOSE(self_or_null[0]); + for (int _i = oparg; --_i >= 0;) { + PyStackRef_CLOSE(args[_i]); + } + if (res_o == NULL) { + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-2 - oparg] = res; stack_pointer += -1 - oparg; @@ -5078,8 +5471,9 @@ callable = &stack_pointer[-2 - oparg]; PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); int total_args = oparg; + _PyStackRef *arguments = args; if (!PyStackRef_IsNull(self_or_null[0])) { - args--; + arguments--; total_args++; } PyMethodDescrObject *method = (PyMethodDescrObject *)callable_o; @@ -5093,21 +5487,23 @@ JUMP_TO_JUMP_TARGET(); } PyTypeObject *d_type = method->d_common.d_type; - PyObject *self = PyStackRef_AsPyObjectBorrow(args[0]); + PyObject *self = PyStackRef_AsPyObjectBorrow(arguments[0]); if (!Py_IS_TYPE(self, d_type)) { UOP_STAT_INC(uopcode, miss); JUMP_TO_JUMP_TARGET(); } STAT_INC(CALL, hit); int nargs = total_args - 1; - STACKREFS_TO_PYOBJECTS(args, total_args, args_o); + STACKREFS_TO_PYOBJECTS(arguments, total_args, args_o); if (CONVERSION_FAILED(args_o)) { PyStackRef_CLOSE(callable[0]); - PyStackRef_CLOSE(self_or_null[0]); + PyStackRef_XCLOSE(self_or_null[0]); for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(args[_i]); } - if (true) JUMP_TO_ERROR(); + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); } _PyFrame_SetStackPointer(frame, stack_pointer); PyCFunctionFastWithKeywords cfunc = @@ -5116,12 +5512,16 @@ stack_pointer = _PyFrame_GetStackPointer(frame); STACKREFS_TO_PYOBJECTS_CLEANUP(args_o); assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); - /* Free the arguments. 
*/ - for (int i = 0; i < total_args; i++) { - PyStackRef_CLOSE(args[i]); - } PyStackRef_CLOSE(callable[0]); - if (res_o == NULL) JUMP_TO_ERROR(); + PyStackRef_XCLOSE(self_or_null[0]); + for (int _i = oparg; --_i >= 0;) { + PyStackRef_CLOSE(args[_i]); + } + if (res_o == NULL) { + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-2 - oparg] = res; stack_pointer += -1 - oparg; @@ -5178,12 +5578,20 @@ stack_pointer = _PyFrame_GetStackPointer(frame); _Py_LeaveRecursiveCallTstate(tstate); assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(self_stackref); + stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(callable[0]); - if (res_o == NULL) JUMP_TO_ERROR(); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (res_o == NULL) { + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); - stack_pointer[-2 - oparg] = res; - stack_pointer += -1 - oparg; + stack_pointer[0] = res; + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); break; } @@ -5199,8 +5607,9 @@ callable = &stack_pointer[-2 - oparg]; PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); int total_args = oparg; + _PyStackRef *arguments = args; if (!PyStackRef_IsNull(self_or_null[0])) { - args--; + arguments--; total_args++; } PyMethodDescrObject *method = (PyMethodDescrObject *)callable_o; @@ -5214,21 +5623,23 @@ UOP_STAT_INC(uopcode, miss); JUMP_TO_JUMP_TARGET(); } - PyObject *self = PyStackRef_AsPyObjectBorrow(args[0]); + PyObject *self = PyStackRef_AsPyObjectBorrow(arguments[0]); if (!Py_IS_TYPE(self, method->d_common.d_type)) { UOP_STAT_INC(uopcode, miss); JUMP_TO_JUMP_TARGET(); } STAT_INC(CALL, hit); int nargs = total_args - 1; - STACKREFS_TO_PYOBJECTS(args, total_args, args_o); + STACKREFS_TO_PYOBJECTS(arguments, total_args, args_o); if (CONVERSION_FAILED(args_o)) { PyStackRef_CLOSE(callable[0]); - PyStackRef_CLOSE(self_or_null[0]); + PyStackRef_XCLOSE(self_or_null[0]); for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(args[_i]); } - if (true) JUMP_TO_ERROR(); + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); } _PyFrame_SetStackPointer(frame, stack_pointer); PyCFunctionFast cfunc = @@ -5237,12 +5648,16 @@ stack_pointer = _PyFrame_GetStackPointer(frame); STACKREFS_TO_PYOBJECTS_CLEANUP(args_o); assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); - /* Clear the stack of the arguments. 
*/ - for (int i = 0; i < total_args; i++) { - PyStackRef_CLOSE(args[i]); - } PyStackRef_CLOSE(callable[0]); - if (res_o == NULL) JUMP_TO_ERROR(); + PyStackRef_XCLOSE(self_or_null[0]); + for (int _i = oparg; --_i >= 0;) { + PyStackRef_CLOSE(args[_i]); + } + if (res_o == NULL) { + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-2 - oparg] = res; stack_pointer += -1 - oparg; @@ -5274,7 +5689,9 @@ PyObject *method = ((PyMethodObject *)callable_o)->im_func; _PyStackRef temp = callable[0]; func[0] = PyStackRef_FromPyObjectNew(method); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(temp); + stack_pointer = _PyFrame_GetStackPointer(frame); } kwnames_out = kwnames_in; stack_pointer[-1] = kwnames_out; @@ -5297,8 +5714,9 @@ PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); // oparg counts all of the args, but *not* self: int total_args = oparg; + _PyStackRef *arguments = args; if (!PyStackRef_IsNull(self_or_null[0])) { - args--; + arguments--; total_args++; } PyObject *kwnames_o = PyStackRef_AsPyObjectBorrow(kwnames); @@ -5309,13 +5727,17 @@ _PyFrame_SetStackPointer(frame, stack_pointer); _PyInterpreterFrame *temp = _PyEvalFramePushAndInit( tstate, callable[0], locals, - args, positional_args, kwnames_o, frame + arguments, positional_args, kwnames_o, frame ); stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(kwnames); + stack_pointer = _PyFrame_GetStackPointer(frame); // The frame has stolen all the arguments from the stack, // so there is no need to clean them up. - stack_pointer += -3 - oparg; + stack_pointer += -2 - oparg; assert(WITHIN_STACK_BOUNDS()); if (temp == NULL) { JUMP_TO_ERROR(); @@ -5374,23 +5796,21 @@ } case _EXPAND_METHOD_KW: { - _PyStackRef *null; + _PyStackRef *self_or_null; _PyStackRef *callable; - _PyStackRef *method; - _PyStackRef *self; oparg = CURRENT_OPARG(); - null = &stack_pointer[-2 - oparg]; + self_or_null = &stack_pointer[-2 - oparg]; callable = &stack_pointer[-3 - oparg]; - method = &stack_pointer[-3 - oparg]; - self = &stack_pointer[-2 - oparg]; + assert(PyStackRef_IsNull(self_or_null[0])); _PyStackRef callable_s = callable[0]; PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable_s); - assert(PyStackRef_IsNull(null[0])); assert(Py_TYPE(callable_o) == &PyMethod_Type); - self[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_self); - method[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_func); - assert(PyStackRef_FunctionCheck(method[0])); + self_or_null[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_self); + callable[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_func); + assert(PyStackRef_FunctionCheck(callable[0])); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(callable_s); + stack_pointer = _PyFrame_GetStackPointer(frame); break; } @@ -5426,20 +5846,23 @@ #endif PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); int total_args = oparg; + _PyStackRef *arguments = args; if (!PyStackRef_IsNull(self_or_null[0])) { - args--; + arguments--; total_args++; } /* Callable is not a normal Python function */ - STACKREFS_TO_PYOBJECTS(args, total_args, args_o); + STACKREFS_TO_PYOBJECTS(arguments, total_args, args_o); if (CONVERSION_FAILED(args_o)) { PyStackRef_CLOSE(callable[0]); - PyStackRef_CLOSE(self_or_null[0]); + 
PyStackRef_XCLOSE(self_or_null[0]); for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(args[_i]); } PyStackRef_CLOSE(kwnames); - if (true) JUMP_TO_ERROR(); + stack_pointer += -3 - oparg; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); } PyObject *kwnames_o = PyStackRef_AsPyObjectBorrow(kwnames); int positional_args = total_args - (int)PyTuple_GET_SIZE(kwnames_o); @@ -5449,17 +5872,26 @@ positional_args | PY_VECTORCALL_ARGUMENTS_OFFSET, kwnames_o); stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(kwnames); + stack_pointer = _PyFrame_GetStackPointer(frame); STACKREFS_TO_PYOBJECTS_CLEANUP(args_o); assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); - for (int i = 0; i < total_args; i++) { - PyStackRef_CLOSE(args[i]); - } PyStackRef_CLOSE(callable[0]); - if (res_o == NULL) JUMP_TO_ERROR(); + PyStackRef_XCLOSE(self_or_null[0]); + for (int _i = oparg; --_i >= 0;) { + PyStackRef_CLOSE(args[_i]); + } + if (res_o == NULL) { + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); - stack_pointer[-3 - oparg] = res; - stack_pointer += -2 - oparg; + stack_pointer[-2 - oparg] = res; + stack_pointer += -1 - oparg; assert(WITHIN_STACK_BOUNDS()); break; } @@ -5467,18 +5899,18 @@ /* _INSTRUMENTED_CALL_FUNCTION_EX is not a viable micro-op for tier 2 because it is instrumented */ case _MAKE_CALLARGS_A_TUPLE: { - _PyStackRef kwargs_in = PyStackRef_NULL; + _PyStackRef kwargs_in; _PyStackRef callargs; _PyStackRef func; _PyStackRef tuple; - _PyStackRef kwargs_out = PyStackRef_NULL; - oparg = CURRENT_OPARG(); - if (oparg & 1) { kwargs_in = stack_pointer[-(oparg & 1)]; } - callargs = stack_pointer[-1 - (oparg & 1)]; - func = stack_pointer[-3 - (oparg & 1)]; + _PyStackRef kwargs_out; + kwargs_in = stack_pointer[-1]; + callargs = stack_pointer[-2]; + func = stack_pointer[-4]; PyObject *callargs_o = PyStackRef_AsPyObjectBorrow(callargs); if (PyTuple_CheckExact(callargs_o)) { tuple = callargs; + kwargs_out = kwargs_in; } else { _PyFrame_SetStackPointer(frame, stack_pointer); @@ -5493,12 +5925,18 @@ if (tuple_o == NULL) { JUMP_TO_ERROR(); } + kwargs_out = kwargs_in; + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(callargs); + stack_pointer = _PyFrame_GetStackPointer(frame); tuple = PyStackRef_FromPyObjectSteal(tuple_o); + stack_pointer += 2; + assert(WITHIN_STACK_BOUNDS()); } - kwargs_out = kwargs_in; - stack_pointer[-1 - (oparg & 1)] = tuple; - if (oparg & 1) stack_pointer[-(oparg & 1)] = kwargs_out; + stack_pointer[-2] = tuple; + stack_pointer[-1] = kwargs_out; break; } @@ -5513,12 +5951,20 @@ PyFunctionObject *func_obj = (PyFunctionObject *) PyFunction_New(codeobj, GLOBALS()); stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(codeobj_st); - if (func_obj == NULL) JUMP_TO_ERROR(); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (func_obj == NULL) { + JUMP_TO_ERROR(); + } _PyFunction_SetVersion( func_obj, ((PyCodeObject *)codeobj)->co_version); func = PyStackRef_FromPyObjectSteal((PyObject *)func_obj); - stack_pointer[-1] = func; + stack_pointer[0] = func; + stack_pointer += 1; + assert(WITHIN_STACK_BOUNDS()); break; } @@ -5551,7 +5997,9 @@ _PyFrame_SetStackPointer(frame, stack_pointer); PyGenObject *gen = 
(PyGenObject *)_Py_MakeCoro(func); stack_pointer = _PyFrame_GetStackPointer(frame); - if (gen == NULL) JUMP_TO_ERROR(); + if (gen == NULL) { + JUMP_TO_ERROR(); + } assert(EMPTY()); _PyFrame_SetStackPointer(frame, stack_pointer); _PyInterpreterFrame *gen_frame = &gen->gi_iframe; @@ -5575,25 +6023,25 @@ } case _BUILD_SLICE: { - _PyStackRef step = PyStackRef_NULL; - _PyStackRef stop; - _PyStackRef start; + _PyStackRef *args; _PyStackRef slice; oparg = CURRENT_OPARG(); - if (oparg == 3) { step = stack_pointer[-((oparg == 3) ? 1 : 0)]; } - stop = stack_pointer[-1 - ((oparg == 3) ? 1 : 0)]; - start = stack_pointer[-2 - ((oparg == 3) ? 1 : 0)]; - PyObject *start_o = PyStackRef_AsPyObjectBorrow(start); - PyObject *stop_o = PyStackRef_AsPyObjectBorrow(stop); - PyObject *step_o = PyStackRef_AsPyObjectBorrow(step); + args = &stack_pointer[-oparg]; + PyObject *start_o = PyStackRef_AsPyObjectBorrow(args[0]); + PyObject *stop_o = PyStackRef_AsPyObjectBorrow(args[1]); + PyObject *step_o = oparg == 3 ? PyStackRef_AsPyObjectBorrow(args[2]) : NULL; PyObject *slice_o = PySlice_New(start_o, stop_o, step_o); - PyStackRef_CLOSE(start); - PyStackRef_CLOSE(stop); - PyStackRef_XCLOSE(step); - if (slice_o == NULL) JUMP_TO_ERROR(); + for (int _i = oparg; --_i >= 0;) { + PyStackRef_CLOSE(args[_i]); + } + if (slice_o == NULL) { + stack_pointer += -oparg; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } slice = PyStackRef_FromPyObjectSteal(slice_o); - stack_pointer[-2 - ((oparg == 3) ? 1 : 0)] = slice; - stack_pointer += -1 - ((oparg == 3) ? 1 : 0); + stack_pointer[-oparg] = slice; + stack_pointer += 1 - oparg; assert(WITHIN_STACK_BOUNDS()); break; } @@ -5609,10 +6057,18 @@ _PyFrame_SetStackPointer(frame, stack_pointer); PyObject *result_o = conv_fn(PyStackRef_AsPyObjectBorrow(value)); stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(value); - if (result_o == NULL) JUMP_TO_ERROR(); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (result_o == NULL) { + JUMP_TO_ERROR(); + } result = PyStackRef_FromPyObjectSteal(result_o); - stack_pointer[-1] = result; + stack_pointer[0] = result; + stack_pointer += 1; + assert(WITHIN_STACK_BOUNDS()); break; } @@ -5627,14 +6083,24 @@ _PyFrame_SetStackPointer(frame, stack_pointer); PyObject *res_o = PyObject_Format(value_o, NULL); stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(value); - if (res_o == NULL) JUMP_TO_ERROR(); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (res_o == NULL) { + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); } else { res = value; + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); } - stack_pointer[-1] = res; + stack_pointer[0] = res; + stack_pointer += 1; + assert(WITHIN_STACK_BOUNDS()); break; } @@ -5649,7 +6115,11 @@ stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(value); PyStackRef_CLOSE(fmt_spec); - if (res_o == NULL) JUMP_TO_ERROR(); + if (res_o == NULL) { + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-2] = res; stack_pointer += -1; @@ -5685,7 +6155,11 @@ stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(lhs); PyStackRef_CLOSE(rhs); - if (res_o == NULL) JUMP_TO_ERROR(); + if (res_o == NULL) { + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + 
JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-2] = res; stack_pointer += -1; @@ -5694,18 +6168,15 @@ } case _SWAP: { - _PyStackRef top_in; - _PyStackRef bottom_in; - _PyStackRef top_out; - _PyStackRef bottom_out; + _PyStackRef *top; + _PyStackRef *bottom; oparg = CURRENT_OPARG(); - top_in = stack_pointer[-1]; - bottom_in = stack_pointer[-2 - (oparg-2)]; - bottom_out = bottom_in; - top_out = top_in; + top = &stack_pointer[-1]; + bottom = &stack_pointer[-2 - (oparg-2)]; + _PyStackRef temp = bottom[0]; + bottom[0] = top[0]; + top[0] = temp; assert(oparg >= 2); - stack_pointer[-2 - (oparg-2)] = top_out; - stack_pointer[-1] = bottom_out; break; } @@ -5758,9 +6229,11 @@ val = stack_pointer[-1]; int is_none = PyStackRef_IsNone(val); if (!is_none) { - PyStackRef_CLOSE(val); stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_CLOSE(val); + stack_pointer = _PyFrame_GetStackPointer(frame); if (1) { UOP_STAT_INC(uopcode, miss); JUMP_TO_JUMP_TARGET(); @@ -5775,9 +6248,11 @@ _PyStackRef val; val = stack_pointer[-1]; int is_none = PyStackRef_IsNone(val); - PyStackRef_CLOSE(val); stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_CLOSE(val); + stack_pointer = _PyFrame_GetStackPointer(frame); if (is_none) { UOP_STAT_INC(uopcode, miss); JUMP_TO_JUMP_TARGET(); @@ -5830,7 +6305,7 @@ stack_pointer = _PyFrame_GetStackPointer(frame); #if defined(Py_DEBUG) && !defined(_Py_JIT) OPT_HIST(trace_uop_execution_counter, trace_run_length_hist); - if (lltrace >= 2) { + if (frame->lltrace >= 2) { _PyFrame_SetStackPointer(frame, stack_pointer); printf("SIDE EXIT: [UOp "); _PyUOpPrint(&next_uop[-1]); @@ -5915,34 +6390,14 @@ _PyStackRef value; pop = stack_pointer[-1]; PyObject *ptr = (PyObject *)CURRENT_OPERAND0(); - PyStackRef_CLOSE(pop); - value = PyStackRef_FromPyObjectImmortal(ptr); - stack_pointer[-1] = value; - break; - } - - case _LOAD_CONST_INLINE_WITH_NULL: { - _PyStackRef value; - _PyStackRef null; - PyObject *ptr = (PyObject *)CURRENT_OPERAND0(); - value = PyStackRef_FromPyObjectNew(ptr); - null = PyStackRef_NULL; - stack_pointer[0] = value; - stack_pointer[1] = null; - stack_pointer += 2; + stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); - break; - } - - case _LOAD_CONST_INLINE_BORROW_WITH_NULL: { - _PyStackRef value; - _PyStackRef null; - PyObject *ptr = (PyObject *)CURRENT_OPERAND0(); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_CLOSE(pop); + stack_pointer = _PyFrame_GetStackPointer(frame); value = PyStackRef_FromPyObjectImmortal(ptr); - null = PyStackRef_NULL; stack_pointer[0] = value; - stack_pointer[1] = null; - stack_pointer += 2; + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); break; } @@ -5960,8 +6415,6 @@ case _LOAD_GLOBAL_MODULE: { _PyStackRef res; - _PyStackRef null = PyStackRef_NULL; - oparg = CURRENT_OPARG(); uint16_t index = (uint16_t)CURRENT_OPERAND0(); PyDictObject *dict = (PyDictObject *)GLOBALS(); PyDictUnicodeEntry *entries = DK_UNICODE_ENTRIES(dict->ma_keys); @@ -5972,18 +6425,14 @@ } Py_INCREF(res_o); res = PyStackRef_FromPyObjectSteal(res_o); - null = PyStackRef_NULL; stack_pointer[0] = res; - if (oparg & 1) stack_pointer[1] = null; - stack_pointer += 1 + (oparg & 1); + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); break; } case _LOAD_GLOBAL_BUILTINS: { _PyStackRef res; - _PyStackRef null = PyStackRef_NULL; - oparg = CURRENT_OPARG(); uint16_t index = (uint16_t)CURRENT_OPERAND0(); PyDictObject *dict = 
(PyDictObject *)BUILTINS(); PyDictUnicodeEntry *entries = DK_UNICODE_ENTRIES(dict->ma_keys); @@ -5994,10 +6443,8 @@ } Py_INCREF(res_o); res = PyStackRef_FromPyObjectSteal(res_o); - null = PyStackRef_NULL; stack_pointer[0] = res; - if (oparg & 1) stack_pointer[1] = null; - stack_pointer += 1 + (oparg & 1); + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); break; } @@ -6005,8 +6452,6 @@ case _LOAD_ATTR_MODULE: { _PyStackRef owner; _PyStackRef attr; - _PyStackRef null = PyStackRef_NULL; - oparg = CURRENT_OPARG(); owner = stack_pointer[-1]; uint16_t index = (uint16_t)CURRENT_OPERAND0(); PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner); @@ -6022,22 +6467,8 @@ STAT_INC(LOAD_ATTR, hit); Py_INCREF(attr_o); attr = PyStackRef_FromPyObjectSteal(attr_o); - null = PyStackRef_NULL; PyStackRef_CLOSE(owner); stack_pointer[-1] = attr; - if (oparg & 1) stack_pointer[0] = null; - stack_pointer += (oparg & 1); - assert(WITHIN_STACK_BOUNDS()); - break; - } - - case _INTERNAL_INCREMENT_OPT_COUNTER: { - _PyStackRef opt; - opt = stack_pointer[-1]; - _PyCounterOptimizerObject *exe = (_PyCounterOptimizerObject *)PyStackRef_AsPyObjectBorrow(opt); - exe->count++; - stack_pointer += -1; - assert(WITHIN_STACK_BOUNDS()); break; } @@ -6048,7 +6479,7 @@ _Py_CODEUNIT *target = frame->instr_ptr; #if defined(Py_DEBUG) && !defined(_Py_JIT) OPT_HIST(trace_uop_execution_counter, trace_run_length_hist); - if (lltrace >= 2) { + if (frame->lltrace >= 2) { _PyFrame_SetStackPointer(frame, stack_pointer); printf("DYNAMIC EXIT: [UOp "); _PyUOpPrint(&next_uop[-1]); @@ -6090,7 +6521,9 @@ case _START_EXECUTOR: { PyObject *executor = (PyObject *)CURRENT_OPERAND0(); + _PyFrame_SetStackPointer(frame, stack_pointer); Py_DECREF(tstate->previous_executor); + stack_pointer = _PyFrame_GetStackPointer(frame); tstate->previous_executor = NULL; #ifndef _Py_JIT current_executor = (_PyExecutorObject*)executor; @@ -6132,8 +6565,7 @@ case _ERROR_POP_N: { oparg = CURRENT_OPARG(); uint32_t target = (uint32_t)CURRENT_OPERAND0(); - stack_pointer += -oparg; - assert(WITHIN_STACK_BOUNDS()); + assert(oparg == 0); _PyFrame_SetStackPointer(frame, stack_pointer); frame->instr_ptr = _PyFrame_GetBytecode(frame) + target; stack_pointer = _PyFrame_GetStackPointer(frame); diff --git a/Python/fileutils.c b/Python/fileutils.c index 81276651f6df44..68d24bc6b93465 100644 --- a/Python/fileutils.c +++ b/Python/fileutils.c @@ -1,6 +1,7 @@ #include "Python.h" #include "pycore_fileutils.h" // fileutils definitions #include "pycore_runtime.h" // _PyRuntime +#include "pycore_pystate.h" // _Py_AssertHoldsTstate() #include "osdefs.h" // SEP #include // mbstowcs() @@ -1311,7 +1312,7 @@ _Py_fstat(int fd, struct _Py_stat_struct *status) { int res; - assert(PyGILState_Check()); + _Py_AssertHoldsTstate(); Py_BEGIN_ALLOW_THREADS res = _Py_fstat_noraise(fd, status); @@ -1691,7 +1692,7 @@ int _Py_open(const char *pathname, int flags) { /* _Py_open() must be called with the GIL held. */ - assert(PyGILState_Check()); + _Py_AssertHoldsTstate(); return _Py_open_impl(pathname, flags, 1); } @@ -1748,8 +1749,10 @@ _Py_wfopen(const wchar_t *path, const wchar_t *mode) } -/* Open a file. Call _wfopen() on Windows, or encode the path to the filesystem - encoding and call fopen() otherwise. +/* Open a file. + + On Windows, if 'path' is a Unicode string, call _wfopen(). Otherwise, encode + the path to the filesystem encoding and call fopen(). Return the new file object on success. Raise an exception and return NULL on error. 
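The fileutils.c hunks above replace assert(PyGILState_Check()) with _Py_AssertHoldsTstate(), asserting that the calling thread has an attached thread state, a precondition that also makes sense on the free-threaded build where a GIL check is not meaningful. Below is a minimal sketch of the calling convention these helpers share; my_read_exact() is a hypothetical example, not part of this diff, and assumes a POSIX read().

    #include "Python.h"
    #include "pycore_pystate.h"    // _Py_AssertHoldsTstate() (internal header, Py_BUILD_CORE)
    #include <unistd.h>            // read() (POSIX, assumption for this sketch)

    // Hypothetical helper following the same pattern as _Py_read()/_Py_write():
    // the caller must have an attached thread state, which is released around
    // the blocking system call and re-acquired afterwards.
    static Py_ssize_t
    my_read_exact(int fd, void *buf, size_t count)
    {
        Py_ssize_t n;
        _Py_AssertHoldsTstate();   // was: assert(PyGILState_Check())
        Py_BEGIN_ALLOW_THREADS
        n = read(fd, buf, count);
        Py_END_ALLOW_THREADS
        return n;
    }
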
@@ -1762,32 +1765,32 @@ _Py_wfopen(const wchar_t *path, const wchar_t *mode) Release the GIL to call _wfopen() or fopen(). The caller must hold the GIL. */ FILE* -_Py_fopen_obj(PyObject *path, const char *mode) +Py_fopen(PyObject *path, const char *mode) { - FILE *f; - int async_err = 0; -#ifdef MS_WINDOWS - wchar_t wmode[10]; - int usize; - - assert(PyGILState_Check()); + _Py_AssertHoldsTstate(); if (PySys_Audit("open", "Osi", path, mode, 0) < 0) { return NULL; } - if (!PyUnicode_Check(path)) { - PyErr_Format(PyExc_TypeError, - "str file path expected under Windows, got %R", - Py_TYPE(path)); + + FILE *f; + int async_err = 0; + int saved_errno; +#ifdef MS_WINDOWS + PyObject *unicode; + if (!PyUnicode_FSDecoder(path, &unicode)) { return NULL; } - wchar_t *wpath = PyUnicode_AsWideCharString(path, NULL); - if (wpath == NULL) + wchar_t *wpath = PyUnicode_AsWideCharString(unicode, NULL); + Py_DECREF(unicode); + if (wpath == NULL) { return NULL; + } - usize = MultiByteToWideChar(CP_ACP, 0, mode, -1, - wmode, Py_ARRAY_LENGTH(wmode)); + wchar_t wmode[10]; + int usize = MultiByteToWideChar(CP_ACP, 0, mode, -1, + wmode, Py_ARRAY_LENGTH(wmode)); if (usize == 0) { PyErr_SetFromWindowsErr(0); PyMem_Free(wpath); @@ -1796,26 +1799,20 @@ _Py_fopen_obj(PyObject *path, const char *mode) do { Py_BEGIN_ALLOW_THREADS + _Py_BEGIN_SUPPRESS_IPH f = _wfopen(wpath, wmode); + _Py_END_SUPPRESS_IPH Py_END_ALLOW_THREADS } while (f == NULL && errno == EINTR && !(async_err = PyErr_CheckSignals())); - int saved_errno = errno; + saved_errno = errno; PyMem_Free(wpath); #else PyObject *bytes; - const char *path_bytes; - - assert(PyGILState_Check()); - - if (!PyUnicode_FSConverter(path, &bytes)) - return NULL; - path_bytes = PyBytes_AS_STRING(bytes); - - if (PySys_Audit("open", "Osi", path, mode, 0) < 0) { - Py_DECREF(bytes); + if (!PyUnicode_FSConverter(path, &bytes)) { return NULL; } + const char *path_bytes = PyBytes_AS_STRING(bytes); do { Py_BEGIN_ALLOW_THREADS @@ -1823,11 +1820,13 @@ _Py_fopen_obj(PyObject *path, const char *mode) Py_END_ALLOW_THREADS } while (f == NULL && errno == EINTR && !(async_err = PyErr_CheckSignals())); - int saved_errno = errno; + saved_errno = errno; Py_DECREF(bytes); #endif - if (async_err) + + if (async_err) { return NULL; + } if (f == NULL) { errno = saved_errno; @@ -1842,6 +1841,19 @@ _Py_fopen_obj(PyObject *path, const char *mode) return f; } + +// Call fclose(). +// +// On Windows, files opened by Py_fopen() in the Python DLL must be closed by +// the Python DLL to use the same C runtime version. Otherwise, calling +// fclose() directly can cause undefined behavior. +int +Py_fclose(FILE *file) +{ + return fclose(file); +} + + /* Read count bytes from fd into buf. On success, return the number of read bytes, it can be lower than count. 
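With _Py_fopen_obj() renamed to the public Py_fopen() and the new Py_fclose() added above, extension code can open a file from an arbitrary path-like object and close it through the same C runtime that opened it (which matters on Windows). A minimal usage sketch, assuming an attached thread state; count_bytes() is a hypothetical helper, not part of this diff:

    #include "Python.h"

    // Count the bytes in a file named by a path-like object.
    // Py_fopen() sets a Python exception and returns NULL on failure;
    // Py_fclose() is used instead of fclose() so the FILE* is closed by
    // the same C runtime that created it.
    static Py_ssize_t
    count_bytes(PyObject *path)
    {
        FILE *fp = Py_fopen(path, "rb");
        if (fp == NULL) {
            return -1;             // exception already set by Py_fopen()
        }
        char buf[4096];
        size_t n;
        Py_ssize_t total = 0;
        while ((n = fread(buf, 1, sizeof(buf), fp)) > 0) {
            total += (Py_ssize_t)n;
        }
        (void)Py_fclose(fp);
        return total;
    }
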
@@ -1862,7 +1874,7 @@ _Py_read(int fd, void *buf, size_t count) int err; int async_err = 0; - assert(PyGILState_Check()); + _Py_AssertHoldsTstate(); /* _Py_read() must not be called with an exception set, otherwise the * caller may think that read() was interrupted by a signal and the signal @@ -2028,7 +2040,7 @@ _Py_write_impl(int fd, const void *buf, size_t count, int gil_held) Py_ssize_t _Py_write(int fd, const void *buf, size_t count) { - assert(PyGILState_Check()); + _Py_AssertHoldsTstate(); /* _Py_write() must not be called with an exception set, otherwise the * caller may think that write() was interrupted by a signal and the signal @@ -2656,7 +2668,7 @@ _Py_dup(int fd) HANDLE handle; #endif - assert(PyGILState_Check()); + _Py_AssertHoldsTstate(); #ifdef MS_WINDOWS handle = _Py_get_osfhandle(fd); diff --git a/Python/flowgraph.c b/Python/flowgraph.c index 64df6290de06ba..a0b76050fd4af6 100644 --- a/Python/flowgraph.c +++ b/Python/flowgraph.c @@ -4,6 +4,7 @@ #include "Python.h" #include "pycore_flowgraph.h" #include "pycore_compile.h" +#include "pycore_intrinsics.h" #include "pycore_pymem.h" // _PyMem_IsPtrFreed() #include "pycore_opcode_utils.h" @@ -522,14 +523,15 @@ no_redundant_jumps(cfg_builder *g) { static int normalize_jumps_in_block(cfg_builder *g, basicblock *b) { cfg_instr *last = basicblock_last_instr(b); - if (last == NULL || !is_jump(last) || - IS_UNCONDITIONAL_JUMP_OPCODE(last->i_opcode)) { + if (last == NULL || !IS_CONDITIONAL_JUMP_OPCODE(last->i_opcode)) { return SUCCESS; } assert(!IS_ASSEMBLER_OPCODE(last->i_opcode)); bool is_forward = last->i_target->b_visited == 0; if (is_forward) { + RETURN_IF_ERROR( + basicblock_addop(b, NOT_TAKEN, 0, last->i_loc)); return SUCCESS; } @@ -557,10 +559,6 @@ normalize_jumps_in_block(cfg_builder *g, basicblock *b) { if (backwards_jump == NULL) { return ERROR; } - assert(b->b_next->b_iused > 0); - assert(b->b_next->b_instr[0].i_opcode == NOT_TAKEN); - b->b_next->b_instr[0].i_opcode = NOP; - b->b_next->b_instr[0].i_loc = NO_LOCATION; RETURN_IF_ERROR( basicblock_addop(backwards_jump, NOT_TAKEN, 0, last->i_loc)); RETURN_IF_ERROR( @@ -1338,6 +1336,17 @@ add_const(PyObject *newconst, PyObject *consts, PyObject *const_cache) return (int)index; } +static bool +is_constant_sequence(cfg_instr *inst, int n) +{ + for (int i = 0; i < n; i++) { + if(!loads_const(inst[i].i_opcode)) { + return false; + } + } + return true; +} + /* Replace LOAD_CONST c1, LOAD_CONST c2 ... LOAD_CONST cn, BUILD_TUPLE n with LOAD_CONST (c1, c2, ... cn). The consts table must still be in list form so that the @@ -1355,10 +1364,8 @@ fold_tuple_on_constants(PyObject *const_cache, assert(inst[n].i_opcode == BUILD_TUPLE); assert(inst[n].i_oparg == n); - for (int i = 0; i < n; i++) { - if (!loads_const(inst[i].i_opcode)) { - return SUCCESS; - } + if (!is_constant_sequence(inst, n)) { + return SUCCESS; } /* Buildup new tuple of constants */ @@ -1386,6 +1393,56 @@ fold_tuple_on_constants(PyObject *const_cache, return SUCCESS; } +#define MIN_CONST_SEQUENCE_SIZE 3 +/* Replace LOAD_CONST c1, LOAD_CONST c2 ... LOAD_CONST cN, BUILD_LIST N + with BUILD_LIST 0, LOAD_CONST (c1, c2, ... cN), LIST_EXTEND 1, + or BUILD_SET & SET_UPDATE respectively. 
+*/ +static int +optimize_if_const_list_or_set(PyObject *const_cache, cfg_instr* inst, int n, PyObject *consts) +{ + assert(PyDict_CheckExact(const_cache)); + assert(PyList_CheckExact(consts)); + assert(inst[n].i_oparg == n); + + int build = inst[n].i_opcode; + assert(build == BUILD_LIST || build == BUILD_SET); + int extend = build == BUILD_LIST ? LIST_EXTEND : SET_UPDATE; + + if (n < MIN_CONST_SEQUENCE_SIZE || !is_constant_sequence(inst, n)) { + return SUCCESS; + } + PyObject *newconst = PyTuple_New(n); + if (newconst == NULL) { + return ERROR; + } + for (int i = 0; i < n; i++) { + int op = inst[i].i_opcode; + int arg = inst[i].i_oparg; + PyObject *constant = get_const_value(op, arg, consts); + if (constant == NULL) { + return ERROR; + } + PyTuple_SET_ITEM(newconst, i, constant); + } + if (build == BUILD_SET) { + PyObject *frozenset = PyFrozenSet_New(newconst); + if (frozenset == NULL) { + return ERROR; + } + Py_SETREF(newconst, frozenset); + } + int index = add_const(newconst, consts, const_cache); + RETURN_IF_ERROR(index); + INSTR_SET_OP1(&inst[0], build, 0); + for (int i = 1; i < n - 1; i++) { + INSTR_SET_OP0(&inst[i], NOP); + } + INSTR_SET_OP1(&inst[n-1], LOAD_CONST, index); + INSTR_SET_OP1(&inst[n], extend, 1); + return SUCCESS; +} + #define VISITED (-1) // Replace an arbitrary run of SWAPs and NOPs with an optimal one that has the @@ -1753,6 +1810,14 @@ optimize_basic_block(PyObject *const_cache, basicblock *bb, PyObject *consts) } } break; + case BUILD_LIST: + case BUILD_SET: + if (i >= oparg) { + if (optimize_if_const_list_or_set(const_cache, inst-oparg, oparg, consts) < 0) { + goto error; + } + } + break; case POP_JUMP_IF_NOT_NONE: case POP_JUMP_IF_NONE: switch (target->i_opcode) { @@ -1877,6 +1942,12 @@ optimize_basic_block(PyObject *const_cache, basicblock *bb, PyObject *consts) continue; } break; + case CALL_INTRINSIC_1: + // for _ in (*foo, *bar) -> for _ in [*foo, *bar] + if (oparg == INTRINSIC_LIST_TO_TUPLE && nextop == GET_ITER) { + INSTR_SET_OP0(inst, NOP); + } + break; } } diff --git a/Python/frame.c b/Python/frame.c index 6eb32bcce0b799..68ac2acbaee342 100644 --- a/Python/frame.c +++ b/Python/frame.c @@ -48,7 +48,7 @@ _PyFrame_MakeAndSetFrameObject(_PyInterpreterFrame *frame) static void take_ownership(PyFrameObject *f, _PyInterpreterFrame *frame) { - assert(frame->owner != FRAME_OWNED_BY_CSTACK); + assert(frame->owner < FRAME_OWNED_BY_INTERPRETER); assert(frame->owner != FRAME_OWNED_BY_FRAME_OBJECT); Py_ssize_t size = ((char*)frame->stackpointer) - (char *)frame; memcpy((_PyInterpreterFrame *)f->_f_frame_data, frame, size); @@ -69,7 +69,7 @@ take_ownership(PyFrameObject *f, _PyInterpreterFrame *frame) _PyInterpreterFrame *prev = _PyFrame_GetFirstComplete(frame->previous); frame->previous = NULL; if (prev) { - assert(prev->owner != FRAME_OWNED_BY_CSTACK); + assert(prev->owner < FRAME_OWNED_BY_INTERPRETER); /* Link PyFrameObjects.f_back and remove link through _PyInterpreterFrame.previous */ PyFrameObject *back = _PyFrame_GetFrameObject(prev); if (back == NULL) { diff --git a/Python/gc.c b/Python/gc.c index 5b9588c8741b97..0fb2f03b0406ad 100644 --- a/Python/gc.c +++ b/Python/gc.c @@ -994,7 +994,8 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old) /* copy-paste of weakrefobject.c's handle_callback() */ temp = PyObject_CallOneArg(callback, (PyObject *)wr); if (temp == NULL) { - PyErr_WriteUnraisable(callback); + PyErr_FormatUnraisable("Exception ignored on " + "calling weakref callback %R", callback); } else { Py_DECREF(temp); @@ -1476,7 +1477,7 @@ 
mark_stacks(PyInterpreterState *interp, PyGC_Head *visited, int visited_space, b while (ts) { _PyInterpreterFrame *frame = ts->current_frame; while (frame) { - if (frame->owner == FRAME_OWNED_BY_CSTACK) { + if (frame->owner >= FRAME_OWNED_BY_INTERPRETER) { frame = frame->previous; continue; } @@ -1779,7 +1780,7 @@ do_gc_callback(GCState *gcstate, const char *phase, "collected", stats->collected, "uncollectable", stats->uncollectable); if (info == NULL) { - PyErr_FormatUnraisable("Exception ignored on invoking gc callbacks"); + PyErr_FormatUnraisable("Exception ignored while invoking gc callbacks"); return; } } @@ -1787,7 +1788,7 @@ do_gc_callback(GCState *gcstate, const char *phase, PyObject *phase_obj = PyUnicode_FromString(phase); if (phase_obj == NULL) { Py_XDECREF(info); - PyErr_FormatUnraisable("Exception ignored on invoking gc callbacks"); + PyErr_FormatUnraisable("Exception ignored while invoking gc callbacks"); return; } @@ -1797,7 +1798,8 @@ do_gc_callback(GCState *gcstate, const char *phase, Py_INCREF(cb); /* make sure cb doesn't go away */ r = PyObject_Vectorcall(cb, stack, 2, NULL); if (r == NULL) { - PyErr_WriteUnraisable(cb); + PyErr_FormatUnraisable("Exception ignored while " + "calling GC callback %R", cb); } else { Py_DECREF(r); @@ -2086,13 +2088,14 @@ _PyGC_DumpShutdownStats(PyInterpreterState *interp) "gc", NULL, message, PyList_GET_SIZE(gcstate->garbage))) { - PyErr_WriteUnraisable(NULL); + PyErr_FormatUnraisable("Exception ignored in GC shutdown"); } if (gcstate->debug & _PyGC_DEBUG_UNCOLLECTABLE) { PyObject *repr = NULL, *bytes = NULL; repr = PyObject_Repr(gcstate->garbage); if (!repr || !(bytes = PyUnicode_EncodeFSDefault(repr))) { - PyErr_WriteUnraisable(gcstate->garbage); + PyErr_FormatUnraisable("Exception ignored in GC shutdown " + "while formatting garbage"); } else { PySys_WriteStderr( @@ -2344,9 +2347,12 @@ PyObject_GC_Del(void *op) #ifdef Py_DEBUG PyObject *exc = PyErr_GetRaisedException(); if (PyErr_WarnExplicitFormat(PyExc_ResourceWarning, "gc", 0, - "gc", NULL, "Object of type %s is not untracked before destruction", - Py_TYPE(op)->tp_name)) { - PyErr_WriteUnraisable(NULL); + "gc", NULL, + "Object of type %s is not untracked " + "before destruction", + Py_TYPE(op)->tp_name)) + { + PyErr_FormatUnraisable("Exception ignored on object deallocation"); } PyErr_SetRaisedException(exc); #endif diff --git a/Python/gc_free_threading.c b/Python/gc_free_threading.c index f7f44407494e51..5d264e407e1cc8 100644 --- a/Python/gc_free_threading.c +++ b/Python/gc_free_threading.c @@ -17,6 +17,17 @@ #include "pydtrace.h" #include "pycore_uniqueid.h" // _PyObject_MergeThreadLocalRefcounts() + +// enable the "mark alive" pass of GC +#define GC_ENABLE_MARK_ALIVE 1 + +// include additional roots in "mark alive" pass +#define GC_MARK_ALIVE_EXTRA_ROOTS 1 + +// include Python stacks as set of known roots +#define GC_MARK_ALIVE_STACKS 1 + + #ifdef Py_GIL_DISABLED typedef struct _gc_runtime_state GCState; @@ -56,6 +67,10 @@ struct collection_state { PyInterpreterState *interp; GCState *gcstate; _PyGC_Reason reason; + // GH-129236: If we see an active frame without a valid stack pointer, + // we can't collect objects with deferred references because we may not + // see all references. 
+ int skip_deferred_objects; Py_ssize_t collected; Py_ssize_t uncollectable; Py_ssize_t long_lived_total; @@ -113,28 +128,66 @@ worklist_remove(struct worklist_iter *iter) iter->next = iter->ptr; } +static inline int +gc_has_bit(PyObject *op, uint8_t bit) +{ + return (op->ob_gc_bits & bit) != 0; +} + +static inline void +gc_set_bit(PyObject *op, uint8_t bit) +{ + op->ob_gc_bits |= bit; +} + +static inline void +gc_clear_bit(PyObject *op, uint8_t bit) +{ + op->ob_gc_bits &= ~bit; +} + static inline int gc_is_frozen(PyObject *op) { - return (op->ob_gc_bits & _PyGC_BITS_FROZEN) != 0; + return gc_has_bit(op, _PyGC_BITS_FROZEN); } static inline int gc_is_unreachable(PyObject *op) { - return (op->ob_gc_bits & _PyGC_BITS_UNREACHABLE) != 0; + return gc_has_bit(op, _PyGC_BITS_UNREACHABLE); } -static void +static inline void gc_set_unreachable(PyObject *op) { - op->ob_gc_bits |= _PyGC_BITS_UNREACHABLE; + gc_set_bit(op, _PyGC_BITS_UNREACHABLE); } -static void +static inline void gc_clear_unreachable(PyObject *op) { - op->ob_gc_bits &= ~_PyGC_BITS_UNREACHABLE; + gc_clear_bit(op, _PyGC_BITS_UNREACHABLE); +} + +static inline int +gc_is_alive(PyObject *op) +{ + return gc_has_bit(op, _PyGC_BITS_ALIVE); +} + +#ifdef GC_ENABLE_MARK_ALIVE +static inline void +gc_set_alive(PyObject *op) +{ + gc_set_bit(op, _PyGC_BITS_ALIVE); +} +#endif + +static inline void +gc_clear_alive(PyObject *op) +{ + gc_clear_bit(op, _PyGC_BITS_ALIVE); } // Initialize the `ob_tid` field to zero if the object is not already @@ -143,6 +196,7 @@ static void gc_maybe_init_refs(PyObject *op) { if (!gc_is_unreachable(op)) { + assert(!gc_is_alive(op)); gc_set_unreachable(op); op->ob_tid = 0; } @@ -264,9 +318,13 @@ static void gc_restore_refs(PyObject *op) { if (gc_is_unreachable(op)) { + assert(!gc_is_alive(op)); gc_restore_tid(op); gc_clear_unreachable(op); } + else { + gc_clear_alive(op); + } } // Given a mimalloc memory block return the PyObject stored in it or NULL if @@ -359,9 +417,6 @@ gc_visit_heaps(PyInterpreterState *interp, mi_block_visit_fun *visitor, static inline void gc_visit_stackref(_PyStackRef stackref) { - // Note: we MUST check that it is deferred before checking the rest. - // Otherwise we might read into invalid memory due to non-deferred references - // being dead already. if (PyStackRef_IsDeferred(stackref) && !PyStackRef_IsNull(stackref)) { PyObject *obj = PyStackRef_AsPyObjectBorrow(stackref); if (_PyObject_GC_IS_TRACKED(obj) && !gc_is_frozen(obj)) { @@ -372,25 +427,152 @@ gc_visit_stackref(_PyStackRef stackref) // Add 1 to the gc_refs for every deferred reference on each thread's stack. static void -gc_visit_thread_stacks(PyInterpreterState *interp) +gc_visit_thread_stacks(PyInterpreterState *interp, struct collection_state *state) { _Py_FOR_EACH_TSTATE_BEGIN(interp, p) { for (_PyInterpreterFrame *f = p->current_frame; f != NULL; f = f->previous) { - PyObject *executable = PyStackRef_AsPyObjectBorrow(f->f_executable); - if (executable == NULL || !PyCode_Check(executable)) { + if (f->owner >= FRAME_OWNED_BY_INTERPRETER) { + continue; + } + + _PyStackRef *top = f->stackpointer; + if (top == NULL) { + // GH-129236: The stackpointer may be NULL in cases where + // the GC is run during a PyStackRef_CLOSE() call. Skip this + // frame and don't collect objects with deferred references. 
+ state->skip_deferred_objects = 1; continue; } - PyCodeObject *co = (PyCodeObject *)executable; - int max_stack = co->co_nlocalsplus + co->co_stacksize; gc_visit_stackref(f->f_executable); - for (int i = 0; i < max_stack; i++) { - gc_visit_stackref(f->localsplus[i]); + while (top != f->localsplus) { + --top; + gc_visit_stackref(*top); + } + } + } + _Py_FOR_EACH_TSTATE_END(interp); +} + +// Untrack objects that can never create reference cycles. +// Return true if the object was untracked. +static bool +gc_maybe_untrack(PyObject *op) +{ + // Currently we only check for tuples containing only non-GC objects. In + // theory we could check other immutable objects that contain references + // to non-GC objects. + if (PyTuple_CheckExact(op)) { + _PyTuple_MaybeUntrack(op); + if (!_PyObject_GC_IS_TRACKED(op)) { + return true; + } + } + return false; +} + +#ifdef GC_ENABLE_MARK_ALIVE +static int +mark_alive_stack_push(PyObject *op, _PyObjectStack *stack) +{ + if (op == NULL) { + return 0; + } + if (!_PyObject_GC_IS_TRACKED(op)) { + return 0; + } + if (gc_is_alive(op)) { + return 0; // already visited this object + } + if (gc_maybe_untrack(op)) { + return 0; // was untracked, don't visit it + } + + // Need to call tp_traverse on this object. Add to stack and mark it + // alive so we don't traverse it a second time. + gc_set_alive(op); + if (_PyObjectStack_Push(stack, op) < 0) { + return -1; + } + return 0; +} + +static bool +gc_clear_alive_bits(const mi_heap_t *heap, const mi_heap_area_t *area, + void *block, size_t block_size, void *args) +{ + PyObject *op = op_from_block(block, args, false); + if (op == NULL) { + return true; + } + if (gc_is_alive(op)) { + gc_clear_alive(op); + } + return true; +} + +static void +gc_abort_mark_alive(PyInterpreterState *interp, + struct collection_state *state, + _PyObjectStack *stack) +{ + // We failed to allocate memory for "stack" while doing the "mark + // alive" phase. In that case, free the object stack and make sure + // that no objects have the alive bit set. + _PyObjectStack_Clear(stack); + gc_visit_heaps(interp, &gc_clear_alive_bits, &state->base); +} + +#ifdef GC_MARK_ALIVE_STACKS +static int +gc_visit_stackref_mark_alive(_PyObjectStack *stack, _PyStackRef stackref) +{ + if (!PyStackRef_IsNull(stackref)) { + PyObject *op = PyStackRef_AsPyObjectBorrow(stackref); + if (mark_alive_stack_push(op, stack) < 0) { + return -1; + } + } + return 0; +} + +static int +gc_visit_thread_stacks_mark_alive(PyInterpreterState *interp, _PyObjectStack *stack) +{ + int err = 0; + _Py_FOR_EACH_TSTATE_BEGIN(interp, p) { + for (_PyInterpreterFrame *f = p->current_frame; f != NULL; f = f->previous) { + if (f->owner >= FRAME_OWNED_BY_INTERPRETER) { + continue; + } + + if (f->stackpointer == NULL) { + // GH-129236: The stackpointer may be NULL in cases where + // the GC is run during a PyStackRef_CLOSE() call. Skip this + // frame for now. 
+ continue; + } + + _PyStackRef *top = f->stackpointer; + if (gc_visit_stackref_mark_alive(stack, f->f_executable) < 0) { + err = -1; + goto exit; + } + while (top != f->localsplus) { + --top; + if (gc_visit_stackref_mark_alive(stack, *top) < 0) { + err = -1; + goto exit; + } } } } +exit: _Py_FOR_EACH_TSTATE_END(interp); + return err; } +#endif // GC_MARK_ALIVE_STACKS +#endif // GC_ENABLE_MARK_ALIVE static void queue_untracked_obj_decref(PyObject *op, struct collection_state *state) @@ -460,7 +642,8 @@ visit_decref(PyObject *op, void *arg) { if (_PyObject_GC_IS_TRACKED(op) && !_Py_IsImmortal(op) - && !gc_is_frozen(op)) + && !gc_is_frozen(op) + && !gc_is_alive(op)) { // If update_refs hasn't reached this object yet, mark it // as (tentatively) unreachable and initialize ob_tid to zero. @@ -482,6 +665,10 @@ update_refs(const mi_heap_t *heap, const mi_heap_area_t *area, return true; } + if (gc_is_alive(op)) { + return true; + } + // Exclude immortal objects from garbage collection if (_Py_IsImmortal(op)) { op->ob_tid = 0; @@ -497,14 +684,9 @@ update_refs(const mi_heap_t *heap, const mi_heap_area_t *area, _PyObject_ASSERT(op, refcount >= 0); if (refcount > 0 && !_PyObject_HasDeferredRefcount(op)) { - // Untrack tuples and dicts as necessary in this pass, but not objects - // with zero refcount, which we will want to collect. - if (PyTuple_CheckExact(op)) { - _PyTuple_MaybeUntrack(op); - if (!_PyObject_GC_IS_TRACKED(op)) { - gc_restore_refs(op); - return true; - } + if (gc_maybe_untrack(op)) { + gc_restore_refs(op); + return true; } } @@ -553,6 +735,21 @@ mark_reachable(PyObject *op) } #ifdef GC_DEBUG +static bool +validate_alive_bits(const mi_heap_t *heap, const mi_heap_area_t *area, + void *block, size_t block_size, void *args) +{ + PyObject *op = op_from_block(block, args, false); + if (op == NULL) { + return true; + } + + _PyObject_ASSERT_WITH_MSG(op, !gc_is_alive(op), + "object should not be marked as alive yet"); + + return true; +} + static bool validate_refcounts(const mi_heap_t *heap, const mi_heap_area_t *area, void *block, size_t block_size, void *args) @@ -586,6 +783,11 @@ validate_gc_objects(const mi_heap_t *heap, const mi_heap_area_t *area, return true; } + if (gc_is_alive(op)) { + _PyObject_ASSERT(op, !gc_is_unreachable(op)); + return true; + } + _PyObject_ASSERT(op, gc_is_unreachable(op)); _PyObject_ASSERT_WITH_MSG(op, gc_get_refs(op) >= 0, "refcount is too small"); @@ -605,7 +807,20 @@ mark_heap_visitor(const mi_heap_t *heap, const mi_heap_area_t *area, _PyObject_ASSERT_WITH_MSG(op, gc_get_refs(op) >= 0, "refcount is too small"); - if (gc_is_unreachable(op) && gc_get_refs(op) != 0) { + if (gc_is_alive(op) || !gc_is_unreachable(op)) { + // Object was already marked as reachable. + return true; + } + + // GH-129236: If we've seen an active frame without a valid stack pointer, + // then we can't collect objects with deferred references because we may + // have missed some reference to the object on the stack. In that case, + // treat the object as reachable even if gc_refs is zero. + struct collection_state *state = (struct collection_state *)args; + int keep_alive = (state->skip_deferred_objects && + _PyObject_HasDeferredRefcount(op)); + + if (gc_get_refs(op) != 0 || keep_alive) { // Object is reachable but currently marked as unreachable. // Mark it as reachable and traverse its pointers to find // any other object that may be directly reachable from it. 
@@ -630,6 +845,7 @@ restore_refs(const mi_heap_t *heap, const mi_heap_area_t *area, } gc_restore_tid(op); gc_clear_unreachable(op); + gc_clear_alive(op); return true; } @@ -679,6 +895,7 @@ scan_heap_visitor(const mi_heap_t *heap, const mi_heap_area_t *area, // object is reachable, restore `ob_tid`; we're done with these objects gc_restore_tid(op); + gc_clear_alive(op); state->long_lived_total++; return true; } @@ -686,6 +903,89 @@ scan_heap_visitor(const mi_heap_t *heap, const mi_heap_area_t *area, static int move_legacy_finalizer_reachable(struct collection_state *state); +#ifdef GC_ENABLE_MARK_ALIVE +static int +propagate_alive_bits(_PyObjectStack *stack) +{ + for (;;) { + PyObject *op = _PyObjectStack_Pop(stack); + if (op == NULL) { + break; + } + assert(_PyObject_GC_IS_TRACKED(op)); + assert(gc_is_alive(op)); + traverseproc traverse = Py_TYPE(op)->tp_traverse; + if (traverse(op, (visitproc)&mark_alive_stack_push, stack) < 0) { + return -1; + } + } + return 0; +} + +// Using tp_traverse, mark everything reachable from known root objects +// (which must be non-garbage) as alive (_PyGC_BITS_ALIVE is set). In +// most programs, this marks nearly all objects that are not actually +// unreachable. +// +// Actually alive objects can be missed in this pass if they are alive +// due to being referenced from an unknown root (e.g. an extension +// module global), some tp_traverse methods are either missing or not +// accurate, or objects that have been untracked. Objects that are only +// reachable from the aforementioned are also missed. +// +// If gc.freeze() is used, this pass is disabled since it is unlikely to +// help much. The next stages of cyclic GC will ignore objects with the +// alive bit set. +// +// Returns -1 on failure (out of memory). +static int +mark_alive_from_roots(PyInterpreterState *interp, + struct collection_state *state) +{ +#ifdef GC_DEBUG + // Check that all objects don't have alive bit set + gc_visit_heaps(interp, &validate_alive_bits, &state->base); +#endif + _PyObjectStack stack = { NULL }; + + #define STACK_PUSH(op) \ + if (mark_alive_stack_push(op, &stack) < 0) { \ + gc_abort_mark_alive(interp, state, &stack); \ + return -1; \ + } + STACK_PUSH(interp->sysdict); +#ifdef GC_MARK_ALIVE_EXTRA_ROOTS + STACK_PUSH(interp->builtins); + STACK_PUSH(interp->dict); + struct types_state *types = &interp->types; + for (int i = 0; i < _Py_MAX_MANAGED_STATIC_BUILTIN_TYPES; i++) { + STACK_PUSH(types->builtins.initialized[i].tp_dict); + STACK_PUSH(types->builtins.initialized[i].tp_subclasses); + } + for (int i = 0; i < _Py_MAX_MANAGED_STATIC_EXT_TYPES; i++) { + STACK_PUSH(types->for_extensions.initialized[i].tp_dict); + STACK_PUSH(types->for_extensions.initialized[i].tp_subclasses); + } +#endif +#ifdef GC_MARK_ALIVE_STACKS + if (gc_visit_thread_stacks_mark_alive(interp, &stack) < 0) { + gc_abort_mark_alive(interp, state, &stack); + return -1; + } +#endif + #undef STACK_PUSH + + // Use tp_traverse to find everything reachable from roots. + if (propagate_alive_bits(&stack) < 0) { + gc_abort_mark_alive(interp, state, &stack); + return -1; + } + + return 0; +} +#endif // GC_ENABLE_MARK_ALIVE + + static int deduce_unreachable_heap(PyInterpreterState *interp, struct collection_state *state) @@ -709,7 +1009,7 @@ deduce_unreachable_heap(PyInterpreterState *interp, #endif // Visit the thread stacks to account for any deferred references. 
- gc_visit_thread_stacks(interp); + gc_visit_thread_stacks(interp, state); // Transitively mark reachable objects by clearing the // _PyGC_BITS_UNREACHABLE flag. @@ -828,7 +1128,8 @@ call_weakref_callbacks(struct collection_state *state) /* copy-paste of weakrefobject.c's handle_callback() */ PyObject *temp = PyObject_CallOneArg(callback, (PyObject *)wr); if (temp == NULL) { - PyErr_WriteUnraisable(callback); + PyErr_FormatUnraisable("Exception ignored while " + "calling weakref callback %R", callback); } else { Py_DECREF(temp); @@ -1127,7 +1428,8 @@ invoke_gc_callback(PyThreadState *tstate, const char *phase, "collected", collected, "uncollectable", uncollectable); if (info == NULL) { - PyErr_FormatUnraisable("Exception ignored on invoking gc callbacks"); + PyErr_FormatUnraisable("Exception ignored while " + "invoking gc callbacks"); return; } } @@ -1135,7 +1437,8 @@ invoke_gc_callback(PyThreadState *tstate, const char *phase, PyObject *phase_obj = PyUnicode_FromString(phase); if (phase_obj == NULL) { Py_XDECREF(info); - PyErr_FormatUnraisable("Exception ignored on invoking gc callbacks"); + PyErr_FormatUnraisable("Exception ignored while " + "invoking gc callbacks"); return; } @@ -1145,7 +1448,8 @@ invoke_gc_callback(PyThreadState *tstate, const char *phase, Py_INCREF(cb); /* make sure cb doesn't go away */ r = PyObject_Vectorcall(cb, stack, 2, NULL); if (r == NULL) { - PyErr_WriteUnraisable(cb); + PyErr_FormatUnraisable("Exception ignored while " + "calling GC callback %R", cb); } else { Py_DECREF(r); @@ -1245,6 +1549,25 @@ gc_collect_internal(PyInterpreterState *interp, struct collection_state *state, process_delayed_frees(interp, state); + #ifdef GC_ENABLE_MARK_ALIVE + // If gc.freeze() was used, it seems likely that doing this "mark alive" + // pass will not be a performance win. Typically the majority of alive + // objects will be marked as frozen and will be skipped anyhow, without + // doing this extra work. Doing this pass also defeats one of the + // purposes of using freeze: avoiding writes to objects that are frozen. + // So, we just skip this if gc.freeze() was used. + if (!state->gcstate->freeze_active) { + // Mark objects reachable from known roots as "alive". These will + // be ignored for rest of the GC pass. + int err = mark_alive_from_roots(interp, state); + if (err < 0) { + _PyEval_StartTheWorld(interp); + PyErr_NoMemory(); + return; + } + } + #endif + // Find unreachable objects int err = deduce_unreachable_heap(interp, state); if (err < 0) { @@ -1253,6 +1576,11 @@ gc_collect_internal(PyInterpreterState *interp, struct collection_state *state, return; } +#ifdef GC_DEBUG + // At this point, no object should have the alive bit set + gc_visit_heaps(interp, &validate_alive_bits, &state->base); +#endif + // Print debugging information. 
if (interp->gc.debug & _PyGC_DEBUG_COLLECTABLE) { PyObject *op; @@ -1564,6 +1892,8 @@ _PyGC_Freeze(PyInterpreterState *interp) { struct visitor_args args; _PyEval_StopTheWorld(interp); + GCState *gcstate = get_gc_state(); + gcstate->freeze_active = true; gc_visit_heaps(interp, &visit_freeze, &args); _PyEval_StartTheWorld(interp); } @@ -1574,7 +1904,7 @@ visit_unfreeze(const mi_heap_t *heap, const mi_heap_area_t *area, { PyObject *op = op_from_block(block, args, true); if (op != NULL) { - op->ob_gc_bits &= ~_PyGC_BITS_FROZEN; + gc_clear_bit(op, _PyGC_BITS_FROZEN); } return true; } @@ -1584,6 +1914,8 @@ _PyGC_Unfreeze(PyInterpreterState *interp) { struct visitor_args args; _PyEval_StopTheWorld(interp); + GCState *gcstate = get_gc_state(); + gcstate->freeze_active = false; gc_visit_heaps(interp, &visit_unfreeze, &args); _PyEval_StartTheWorld(interp); } @@ -1699,13 +2031,14 @@ _PyGC_DumpShutdownStats(PyInterpreterState *interp) "gc", NULL, message, PyList_GET_SIZE(gcstate->garbage))) { - PyErr_WriteUnraisable(NULL); + PyErr_FormatUnraisable("Exception ignored in GC shutdown"); } if (gcstate->debug & _PyGC_DEBUG_UNCOLLECTABLE) { PyObject *repr = NULL, *bytes = NULL; repr = PyObject_Repr(gcstate->garbage); if (!repr || !(bytes = PyUnicode_EncodeFSDefault(repr))) { - PyErr_WriteUnraisable(gcstate->garbage); + PyErr_FormatUnraisable("Exception ignored in GC shutdown " + "while formatting garbage"); } else { PySys_WriteStderr( @@ -1913,9 +2246,12 @@ PyObject_GC_Del(void *op) #ifdef Py_DEBUG PyObject *exc = PyErr_GetRaisedException(); if (PyErr_WarnExplicitFormat(PyExc_ResourceWarning, "gc", 0, - "gc", NULL, "Object of type %s is not untracked before destruction", - ((PyObject*)op)->ob_type->tp_name)) { - PyErr_WriteUnraisable(NULL); + "gc", NULL, + "Object of type %s is not untracked " + "before destruction", + Py_TYPE(op)->tp_name)) + { + PyErr_FormatUnraisable("Exception ignored on object deallocation"); } PyErr_SetRaisedException(exc); #endif diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h index 98743c27c38524..44a78c410485c0 100644 --- a/Python/generated_cases.c.h +++ b/Python/generated_cases.c.h @@ -8,13 +8,20 @@ #endif #define TIER_ONE 1 +#if !USE_COMPUTED_GOTOS + dispatch_opcode: + switch (opcode) +#endif + { + /* BEGIN INSTRUCTIONS */ + TARGET(BINARY_OP) { frame->instr_ptr = next_instr; - next_instr += 2; + next_instr += 6; INSTRUCTION_STATS(BINARY_OP); - PREDICTED(BINARY_OP); - _Py_CODEUNIT* const this_instr = next_instr - 2; + PREDICTED_BINARY_OP:; + _Py_CODEUNIT* const this_instr = next_instr - 6; (void)this_instr; _PyStackRef lhs; _PyStackRef rhs; @@ -39,6 +46,7 @@ assert(NB_ADD <= oparg); assert(oparg <= NB_INPLACE_XOR); } + /* Skip 4 cache entries */ // _BINARY_OP { PyObject *lhs_o = PyStackRef_AsPyObjectBorrow(lhs); @@ -49,7 +57,9 @@ stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(lhs); PyStackRef_CLOSE(rhs); - if (res_o == NULL) goto pop_2_error; + if (res_o == NULL) { + goto pop_2_error; + } res = PyStackRef_FromPyObjectSteal(res_o); } stack_pointer[-2] = res; @@ -60,9 +70,9 @@ TARGET(BINARY_OP_ADD_FLOAT) { frame->instr_ptr = next_instr; - next_instr += 2; + next_instr += 6; INSTRUCTION_STATS(BINARY_OP_ADD_FLOAT); - static_assert(INLINE_CACHE_ENTRIES_BINARY_OP == 1, "incorrect cache size"); + static_assert(INLINE_CACHE_ENTRIES_BINARY_OP == 5, "incorrect cache size"); _PyStackRef left; _PyStackRef right; _PyStackRef res; @@ -75,17 +85,21 @@ DEOPT_IF(!PyFloat_CheckExact(left_o), BINARY_OP); DEOPT_IF(!PyFloat_CheckExact(right_o), BINARY_OP); } - 
/* Skip 1 cache entry */ + /* Skip 5 cache entries */ // _BINARY_OP_ADD_FLOAT { PyObject *left_o = PyStackRef_AsPyObjectBorrow(left); PyObject *right_o = PyStackRef_AsPyObjectBorrow(right); + assert(PyFloat_CheckExact(left_o)); + assert(PyFloat_CheckExact(right_o)); STAT_INC(BINARY_OP, hit); double dres = ((PyFloatObject *)left_o)->ob_fval + ((PyFloatObject *)right_o)->ob_fval; PyObject *res_o = _PyFloat_FromDouble_ConsumeInputs(left, right, dres); - if (res_o == NULL) goto pop_2_error; + if (res_o == NULL) { + goto pop_2_error; + } res = PyStackRef_FromPyObjectSteal(res_o); } stack_pointer[-2] = res; @@ -96,9 +110,9 @@ TARGET(BINARY_OP_ADD_INT) { frame->instr_ptr = next_instr; - next_instr += 2; + next_instr += 6; INSTRUCTION_STATS(BINARY_OP_ADD_INT); - static_assert(INLINE_CACHE_ENTRIES_BINARY_OP == 1, "incorrect cache size"); + static_assert(INLINE_CACHE_ENTRIES_BINARY_OP == 5, "incorrect cache size"); _PyStackRef left; _PyStackRef right; _PyStackRef res; @@ -111,16 +125,20 @@ DEOPT_IF(!PyLong_CheckExact(left_o), BINARY_OP); DEOPT_IF(!PyLong_CheckExact(right_o), BINARY_OP); } - /* Skip 1 cache entry */ + /* Skip 5 cache entries */ // _BINARY_OP_ADD_INT { PyObject *left_o = PyStackRef_AsPyObjectBorrow(left); PyObject *right_o = PyStackRef_AsPyObjectBorrow(right); + assert(PyLong_CheckExact(left_o)); + assert(PyLong_CheckExact(right_o)); STAT_INC(BINARY_OP, hit); PyObject *res_o = _PyLong_Add((PyLongObject *)left_o, (PyLongObject *)right_o); PyStackRef_CLOSE_SPECIALIZED(right, _PyLong_ExactDealloc); PyStackRef_CLOSE_SPECIALIZED(left, _PyLong_ExactDealloc); - if (res_o == NULL) goto pop_2_error; + if (res_o == NULL) { + goto pop_2_error; + } res = PyStackRef_FromPyObjectSteal(res_o); } stack_pointer[-2] = res; @@ -131,9 +149,9 @@ TARGET(BINARY_OP_ADD_UNICODE) { frame->instr_ptr = next_instr; - next_instr += 2; + next_instr += 6; INSTRUCTION_STATS(BINARY_OP_ADD_UNICODE); - static_assert(INLINE_CACHE_ENTRIES_BINARY_OP == 1, "incorrect cache size"); + static_assert(INLINE_CACHE_ENTRIES_BINARY_OP == 5, "incorrect cache size"); _PyStackRef left; _PyStackRef right; _PyStackRef res; @@ -146,16 +164,67 @@ DEOPT_IF(!PyUnicode_CheckExact(left_o), BINARY_OP); DEOPT_IF(!PyUnicode_CheckExact(right_o), BINARY_OP); } - /* Skip 1 cache entry */ + /* Skip 5 cache entries */ // _BINARY_OP_ADD_UNICODE { PyObject *left_o = PyStackRef_AsPyObjectBorrow(left); PyObject *right_o = PyStackRef_AsPyObjectBorrow(right); + assert(PyUnicode_CheckExact(left_o)); + assert(PyUnicode_CheckExact(right_o)); STAT_INC(BINARY_OP, hit); PyObject *res_o = PyUnicode_Concat(left_o, right_o); - PyStackRef_CLOSE_SPECIALIZED(left, _PyUnicode_ExactDealloc); PyStackRef_CLOSE_SPECIALIZED(right, _PyUnicode_ExactDealloc); - if (res_o == NULL) goto pop_2_error; + PyStackRef_CLOSE_SPECIALIZED(left, _PyUnicode_ExactDealloc); + if (res_o == NULL) { + goto pop_2_error; + } + res = PyStackRef_FromPyObjectSteal(res_o); + } + stack_pointer[-2] = res; + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + DISPATCH(); + } + + TARGET(BINARY_OP_EXTEND) { + _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; + next_instr += 6; + INSTRUCTION_STATS(BINARY_OP_EXTEND); + static_assert(INLINE_CACHE_ENTRIES_BINARY_OP == 5, "incorrect cache size"); + _PyStackRef left; + _PyStackRef right; + _PyStackRef res; + /* Skip 1 cache entry */ + // _GUARD_BINARY_OP_EXTEND + { + right = stack_pointer[-1]; + left = stack_pointer[-2]; + PyObject *descr = read_obj(&this_instr[2].cache); + PyObject *left_o = 
PyStackRef_AsPyObjectBorrow(left); + PyObject *right_o = PyStackRef_AsPyObjectBorrow(right); + _PyBinaryOpSpecializationDescr *d = (_PyBinaryOpSpecializationDescr*)descr; + assert(INLINE_CACHE_ENTRIES_BINARY_OP == 5); + assert(d && d->guard); + _PyFrame_SetStackPointer(frame, stack_pointer); + int res = d->guard(left_o, right_o); + stack_pointer = _PyFrame_GetStackPointer(frame); + DEOPT_IF(!res, BINARY_OP); + } + /* Skip -4 cache entry */ + // _BINARY_OP_EXTEND + { + PyObject *descr = read_obj(&this_instr[2].cache); + PyObject *left_o = PyStackRef_AsPyObjectBorrow(left); + PyObject *right_o = PyStackRef_AsPyObjectBorrow(right); + assert(INLINE_CACHE_ENTRIES_BINARY_OP == 5); + _PyBinaryOpSpecializationDescr *d = (_PyBinaryOpSpecializationDescr*)descr; + STAT_INC(BINARY_OP, hit); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyObject *res_o = d->action(left_o, right_o); + stack_pointer = _PyFrame_GetStackPointer(frame); + PyStackRef_CLOSE(left); + PyStackRef_CLOSE(right); res = PyStackRef_FromPyObjectSteal(res_o); } stack_pointer[-2] = res; @@ -166,9 +235,9 @@ TARGET(BINARY_OP_INPLACE_ADD_UNICODE) { frame->instr_ptr = next_instr; - next_instr += 2; + next_instr += 6; INSTRUCTION_STATS(BINARY_OP_INPLACE_ADD_UNICODE); - static_assert(INLINE_CACHE_ENTRIES_BINARY_OP == 1, "incorrect cache size"); + static_assert(INLINE_CACHE_ENTRIES_BINARY_OP == 5, "incorrect cache size"); _PyStackRef left; _PyStackRef right; // _GUARD_BOTH_UNICODE @@ -180,11 +249,13 @@ DEOPT_IF(!PyUnicode_CheckExact(left_o), BINARY_OP); DEOPT_IF(!PyUnicode_CheckExact(right_o), BINARY_OP); } - /* Skip 1 cache entry */ + /* Skip 5 cache entries */ // _BINARY_OP_INPLACE_ADD_UNICODE { PyObject *left_o = PyStackRef_AsPyObjectBorrow(left); PyObject *right_o = PyStackRef_AsPyObjectBorrow(right); + assert(PyUnicode_CheckExact(left_o)); + assert(PyUnicode_CheckExact(right_o)); int next_oparg; #if TIER_ONE assert(next_instr->op.code == STORE_FAST); @@ -207,12 +278,14 @@ * that the string is safe to mutate. */ assert(Py_REFCNT(left_o) >= 2); - PyStackRef_CLOSE(left); + PyStackRef_CLOSE_SPECIALIZED(left, _PyUnicode_ExactDealloc); PyObject *temp = PyStackRef_AsPyObjectSteal(*target_local); PyUnicode_Append(&temp, right_o); *target_local = PyStackRef_FromPyObjectSteal(temp); PyStackRef_CLOSE_SPECIALIZED(right, _PyUnicode_ExactDealloc); - if (PyStackRef_IsNull(*target_local)) goto pop_2_error; + if (PyStackRef_IsNull(*target_local)) { + goto pop_2_error; + } #if TIER_ONE // The STORE_FAST is already done. 
This is done here in tier one, // and during trace projection in tier two: @@ -227,9 +300,9 @@ TARGET(BINARY_OP_MULTIPLY_FLOAT) { frame->instr_ptr = next_instr; - next_instr += 2; + next_instr += 6; INSTRUCTION_STATS(BINARY_OP_MULTIPLY_FLOAT); - static_assert(INLINE_CACHE_ENTRIES_BINARY_OP == 1, "incorrect cache size"); + static_assert(INLINE_CACHE_ENTRIES_BINARY_OP == 5, "incorrect cache size"); _PyStackRef left; _PyStackRef right; _PyStackRef res; @@ -242,17 +315,21 @@ DEOPT_IF(!PyFloat_CheckExact(left_o), BINARY_OP); DEOPT_IF(!PyFloat_CheckExact(right_o), BINARY_OP); } - /* Skip 1 cache entry */ + /* Skip 5 cache entries */ // _BINARY_OP_MULTIPLY_FLOAT { PyObject *left_o = PyStackRef_AsPyObjectBorrow(left); PyObject *right_o = PyStackRef_AsPyObjectBorrow(right); + assert(PyFloat_CheckExact(left_o)); + assert(PyFloat_CheckExact(right_o)); STAT_INC(BINARY_OP, hit); double dres = ((PyFloatObject *)left_o)->ob_fval * ((PyFloatObject *)right_o)->ob_fval; PyObject *res_o = _PyFloat_FromDouble_ConsumeInputs(left, right, dres); - if (res_o == NULL) goto pop_2_error; + if (res_o == NULL) { + goto pop_2_error; + } res = PyStackRef_FromPyObjectSteal(res_o); } stack_pointer[-2] = res; @@ -263,9 +340,9 @@ TARGET(BINARY_OP_MULTIPLY_INT) { frame->instr_ptr = next_instr; - next_instr += 2; + next_instr += 6; INSTRUCTION_STATS(BINARY_OP_MULTIPLY_INT); - static_assert(INLINE_CACHE_ENTRIES_BINARY_OP == 1, "incorrect cache size"); + static_assert(INLINE_CACHE_ENTRIES_BINARY_OP == 5, "incorrect cache size"); _PyStackRef left; _PyStackRef right; _PyStackRef res; @@ -278,16 +355,20 @@ DEOPT_IF(!PyLong_CheckExact(left_o), BINARY_OP); DEOPT_IF(!PyLong_CheckExact(right_o), BINARY_OP); } - /* Skip 1 cache entry */ + /* Skip 5 cache entries */ // _BINARY_OP_MULTIPLY_INT { PyObject *left_o = PyStackRef_AsPyObjectBorrow(left); PyObject *right_o = PyStackRef_AsPyObjectBorrow(right); + assert(PyLong_CheckExact(left_o)); + assert(PyLong_CheckExact(right_o)); STAT_INC(BINARY_OP, hit); PyObject *res_o = _PyLong_Multiply((PyLongObject *)left_o, (PyLongObject *)right_o); PyStackRef_CLOSE_SPECIALIZED(right, _PyLong_ExactDealloc); PyStackRef_CLOSE_SPECIALIZED(left, _PyLong_ExactDealloc); - if (res_o == NULL) goto pop_2_error; + if (res_o == NULL) { + goto pop_2_error; + } res = PyStackRef_FromPyObjectSteal(res_o); } stack_pointer[-2] = res; @@ -298,9 +379,9 @@ TARGET(BINARY_OP_SUBTRACT_FLOAT) { frame->instr_ptr = next_instr; - next_instr += 2; + next_instr += 6; INSTRUCTION_STATS(BINARY_OP_SUBTRACT_FLOAT); - static_assert(INLINE_CACHE_ENTRIES_BINARY_OP == 1, "incorrect cache size"); + static_assert(INLINE_CACHE_ENTRIES_BINARY_OP == 5, "incorrect cache size"); _PyStackRef left; _PyStackRef right; _PyStackRef res; @@ -313,17 +394,21 @@ DEOPT_IF(!PyFloat_CheckExact(left_o), BINARY_OP); DEOPT_IF(!PyFloat_CheckExact(right_o), BINARY_OP); } - /* Skip 1 cache entry */ + /* Skip 5 cache entries */ // _BINARY_OP_SUBTRACT_FLOAT { PyObject *left_o = PyStackRef_AsPyObjectBorrow(left); PyObject *right_o = PyStackRef_AsPyObjectBorrow(right); + assert(PyFloat_CheckExact(left_o)); + assert(PyFloat_CheckExact(right_o)); STAT_INC(BINARY_OP, hit); double dres = ((PyFloatObject *)left_o)->ob_fval - ((PyFloatObject *)right_o)->ob_fval; PyObject *res_o = _PyFloat_FromDouble_ConsumeInputs(left, right, dres); - if (res_o == NULL) goto pop_2_error; + if (res_o == NULL) { + goto pop_2_error; + } res = PyStackRef_FromPyObjectSteal(res_o); } stack_pointer[-2] = res; @@ -334,9 +419,9 @@ TARGET(BINARY_OP_SUBTRACT_INT) { frame->instr_ptr = next_instr; - 
next_instr += 2; + next_instr += 6; INSTRUCTION_STATS(BINARY_OP_SUBTRACT_INT); - static_assert(INLINE_CACHE_ENTRIES_BINARY_OP == 1, "incorrect cache size"); + static_assert(INLINE_CACHE_ENTRIES_BINARY_OP == 5, "incorrect cache size"); _PyStackRef left; _PyStackRef right; _PyStackRef res; @@ -349,16 +434,20 @@ DEOPT_IF(!PyLong_CheckExact(left_o), BINARY_OP); DEOPT_IF(!PyLong_CheckExact(right_o), BINARY_OP); } - /* Skip 1 cache entry */ + /* Skip 5 cache entries */ // _BINARY_OP_SUBTRACT_INT { PyObject *left_o = PyStackRef_AsPyObjectBorrow(left); PyObject *right_o = PyStackRef_AsPyObjectBorrow(right); + assert(PyLong_CheckExact(left_o)); + assert(PyLong_CheckExact(right_o)); STAT_INC(BINARY_OP, hit); PyObject *res_o = _PyLong_Subtract((PyLongObject *)left_o, (PyLongObject *)right_o); PyStackRef_CLOSE_SPECIALIZED(right, _PyLong_ExactDealloc); PyStackRef_CLOSE_SPECIALIZED(left, _PyLong_ExactDealloc); - if (res_o == NULL) goto pop_2_error; + if (res_o == NULL) { + goto pop_2_error; + } res = PyStackRef_FromPyObjectSteal(res_o); } stack_pointer[-2] = res; @@ -402,17 +491,23 @@ assert(WITHIN_STACK_BOUNDS()); _PyFrame_SetStackPointer(frame, stack_pointer); res_o = PyObject_GetItem(PyStackRef_AsPyObjectBorrow(container), slice); - stack_pointer = _PyFrame_GetStackPointer(frame); Py_DECREF(slice); + stack_pointer = _PyFrame_GetStackPointer(frame); stack_pointer += 2; assert(WITHIN_STACK_BOUNDS()); } + stack_pointer += -3; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(container); - if (res_o == NULL) goto pop_3_error; + stack_pointer = _PyFrame_GetStackPointer(frame); + if (res_o == NULL) { + goto error; + } res = PyStackRef_FromPyObjectSteal(res_o); } - stack_pointer[-3] = res; - stack_pointer += -2; + stack_pointer[0] = res; + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); DISPATCH(); } @@ -421,7 +516,7 @@ frame->instr_ptr = next_instr; next_instr += 2; INSTRUCTION_STATS(BINARY_SUBSCR); - PREDICTED(BINARY_SUBSCR); + PREDICTED_BINARY_SUBSCR:; _Py_CODEUNIT* const this_instr = next_instr - 2; (void)this_instr; _PyStackRef container; @@ -455,7 +550,9 @@ stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(container); PyStackRef_CLOSE(sub); - if (res_o == NULL) goto pop_2_error; + if (res_o == NULL) { + goto pop_2_error; + } res = PyStackRef_FromPyObjectSteal(res_o); } stack_pointer[-2] = res; @@ -490,7 +587,9 @@ } PyStackRef_CLOSE(dict_st); PyStackRef_CLOSE(sub_st); - if (rc <= 0) goto pop_2_error; + if (rc <= 0) { + goto pop_2_error; + } // not found or error res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-2] = res; @@ -590,10 +689,14 @@ Py_INCREF(res_o); #endif PyStackRef_CLOSE_SPECIALIZED(sub_st, _PyLong_ExactDealloc); + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(list_st); + stack_pointer = _PyFrame_GetStackPointer(frame); res = PyStackRef_FromPyObjectSteal(res_o); - stack_pointer[-2] = res; - stack_pointer += -1; + stack_pointer[0] = res; + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); DISPATCH(); } @@ -622,10 +725,14 @@ STAT_INC(BINARY_SUBSCR, hit); PyObject *res_o = (PyObject*)&_Py_SINGLETON(strings).ascii[c]; PyStackRef_CLOSE_SPECIALIZED(sub_st, _PyLong_ExactDealloc); + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(str_st); + stack_pointer = _PyFrame_GetStackPointer(frame); res = PyStackRef_FromPyObjectSteal(res_o); - stack_pointer[-2] = res; - stack_pointer += 
-1; + stack_pointer[0] = res; + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); DISPATCH(); } @@ -654,10 +761,14 @@ assert(res_o != NULL); Py_INCREF(res_o); PyStackRef_CLOSE_SPECIALIZED(sub_st, _PyLong_ExactDealloc); + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(tuple_st); + stack_pointer = _PyFrame_GetStackPointer(frame); res = PyStackRef_FromPyObjectSteal(res_o); - stack_pointer[-2] = res; - stack_pointer += -1; + stack_pointer[0] = res; + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); DISPATCH(); } @@ -669,10 +780,8 @@ _PyStackRef *values; _PyStackRef list; values = &stack_pointer[-oparg]; - PyObject *list_o = _PyList_FromStackRefSteal(values, oparg); + PyObject *list_o = _PyList_FromStackRefStealOnSuccess(values, oparg); if (list_o == NULL) { - stack_pointer += -oparg; - assert(WITHIN_STACK_BOUNDS()); goto error; } list = PyStackRef_FromPyObjectSteal(list_o); @@ -694,11 +803,9 @@ for (int _i = oparg*2; --_i >= 0;) { PyStackRef_CLOSE(values[_i]); } - { - stack_pointer += -oparg*2; - assert(WITHIN_STACK_BOUNDS()); - goto error; - } + stack_pointer += -oparg*2; + assert(WITHIN_STACK_BOUNDS()); + goto error; } _PyFrame_SetStackPointer(frame, stack_pointer); PyObject *map_o = _PyDict_FromItems( @@ -736,11 +843,9 @@ for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(values[_i]); } - { - stack_pointer += -oparg; - assert(WITHIN_STACK_BOUNDS()); - goto error; - } + stack_pointer += -oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; } int err = 0; for (int i = 0; i < oparg; i++) { @@ -749,15 +854,17 @@ err = PySet_Add(set_o, PyStackRef_AsPyObjectBorrow(values[i])); stack_pointer = _PyFrame_GetStackPointer(frame); } - PyStackRef_CLOSE(values[i]); + } + for (int _i = oparg; --_i >= 0;) { + PyStackRef_CLOSE(values[_i]); } if (err != 0) { + stack_pointer += -oparg; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); Py_DECREF(set_o); - { - stack_pointer += -oparg; - assert(WITHIN_STACK_BOUNDS()); - goto error; - } + stack_pointer = _PyFrame_GetStackPointer(frame); + goto error; } set = PyStackRef_FromPyObjectSteal(set_o); stack_pointer[-oparg] = set; @@ -770,28 +877,24 @@ frame->instr_ptr = next_instr; next_instr += 1; INSTRUCTION_STATS(BUILD_SLICE); - _PyStackRef start; - _PyStackRef stop; - _PyStackRef step = PyStackRef_NULL; + _PyStackRef *args; _PyStackRef slice; - if (oparg == 3) { step = stack_pointer[-((oparg == 3) ? 1 : 0)]; } - stop = stack_pointer[-1 - ((oparg == 3) ? 1 : 0)]; - start = stack_pointer[-2 - ((oparg == 3) ? 1 : 0)]; - PyObject *start_o = PyStackRef_AsPyObjectBorrow(start); - PyObject *stop_o = PyStackRef_AsPyObjectBorrow(stop); - PyObject *step_o = PyStackRef_AsPyObjectBorrow(step); + args = &stack_pointer[-oparg]; + PyObject *start_o = PyStackRef_AsPyObjectBorrow(args[0]); + PyObject *stop_o = PyStackRef_AsPyObjectBorrow(args[1]); + PyObject *step_o = oparg == 3 ? PyStackRef_AsPyObjectBorrow(args[2]) : NULL; PyObject *slice_o = PySlice_New(start_o, stop_o, step_o); - PyStackRef_CLOSE(start); - PyStackRef_CLOSE(stop); - PyStackRef_XCLOSE(step); + for (int _i = oparg; --_i >= 0;) { + PyStackRef_CLOSE(args[_i]); + } if (slice_o == NULL) { - stack_pointer += -2 - ((oparg == 3) ? 1 : 0); + stack_pointer += -oparg; assert(WITHIN_STACK_BOUNDS()); goto error; } slice = PyStackRef_FromPyObjectSteal(slice_o); - stack_pointer[-2 - ((oparg == 3) ? 1 : 0)] = slice; - stack_pointer += -1 - ((oparg == 3) ? 
1 : 0); + stack_pointer[-oparg] = slice; + stack_pointer += 1 - oparg; assert(WITHIN_STACK_BOUNDS()); DISPATCH(); } @@ -808,11 +911,9 @@ for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(pieces[_i]); } - { - stack_pointer += -oparg; - assert(WITHIN_STACK_BOUNDS()); - goto error; - } + stack_pointer += -oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; } PyObject *str_o = _PyUnicode_JoinArray(&_Py_STR(empty), pieces_o, oparg); STACKREFS_TO_PYOBJECTS_CLEANUP(pieces_o); @@ -838,10 +939,8 @@ _PyStackRef *values; _PyStackRef tup; values = &stack_pointer[-oparg]; - PyObject *tup_o = _PyTuple_FromStackRefSteal(values, oparg); + PyObject *tup_o = _PyTuple_FromStackRefStealOnSuccess(values, oparg); if (tup_o == NULL) { - stack_pointer += -oparg; - assert(WITHIN_STACK_BOUNDS()); goto error; } tup = PyStackRef_FromPyObjectSteal(tup_o); @@ -864,7 +963,7 @@ frame->instr_ptr = next_instr; next_instr += 4; INSTRUCTION_STATS(CALL); - PREDICTED(CALL); + PREDICTED_CALL:; _Py_CODEUNIT* const this_instr = next_instr - 4; (void)this_instr; _PyStackRef *callable; @@ -904,7 +1003,9 @@ PyObject *method = ((PyMethodObject *)callable_o)->im_func; _PyStackRef temp = callable[0]; func[0] = PyStackRef_FromPyObjectNew(method); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(temp); + stack_pointer = _PyFrame_GetStackPointer(frame); } } // _DO_CALL @@ -915,8 +1016,9 @@ PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); // oparg counts all of the args, but *not* self: int total_args = oparg; + _PyStackRef *arguments = args; if (!PyStackRef_IsNull(self_or_null[0])) { - args--; + arguments--; total_args++; } // Check if the call can be inlined or not @@ -929,7 +1031,7 @@ _PyFrame_SetStackPointer(frame, stack_pointer); _PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit( tstate, callable[0], locals, - args, total_args, NULL, frame + arguments, total_args, NULL, frame ); stack_pointer = _PyFrame_GetStackPointer(frame); // Manipulate stack directly since we leave using DISPATCH_INLINED(). @@ -944,17 +1046,16 @@ DISPATCH_INLINED(new_frame); } /* Callable is not a normal Python function */ - STACKREFS_TO_PYOBJECTS(args, total_args, args_o); + STACKREFS_TO_PYOBJECTS(arguments, total_args, args_o); if (CONVERSION_FAILED(args_o)) { PyStackRef_CLOSE(callable[0]); - for (int i = 0; i < total_args; i++) { - PyStackRef_CLOSE(args[i]); - } - { - stack_pointer += -2 - oparg; - assert(WITHIN_STACK_BOUNDS()); - goto error; + PyStackRef_XCLOSE(self_or_null[0]); + for (int _i = oparg; --_i >= 0;) { + PyStackRef_CLOSE(args[_i]); } + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; } _PyFrame_SetStackPointer(frame, stack_pointer); PyObject *res_o = PyObject_Vectorcall( @@ -965,7 +1066,7 @@ STACKREFS_TO_PYOBJECTS_CLEANUP(args_o); if (opcode == INSTRUMENTED_CALL) { PyObject *arg = total_args == 0 ? 
- &_PyInstrumentation_MISSING : PyStackRef_AsPyObjectBorrow(args[0]); + &_PyInstrumentation_MISSING : PyStackRef_AsPyObjectBorrow(arguments[0]); if (res_o == NULL) { _PyFrame_SetStackPointer(frame, stack_pointer); _Py_call_instrumentation_exc2( @@ -986,8 +1087,9 @@ } assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); PyStackRef_CLOSE(callable[0]); - for (int i = 0; i < total_args; i++) { - PyStackRef_CLOSE(args[i]); + PyStackRef_XCLOSE(self_or_null[0]); + for (int _i = oparg; --_i >= 0;) { + PyStackRef_CLOSE(args[_i]); } if (res_o == NULL) { stack_pointer += -2 - oparg; @@ -1007,7 +1109,9 @@ _PyFrame_SetStackPointer(frame, stack_pointer); int err = _Py_HandlePending(tstate); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err != 0) goto error; + if (err != 0) { + goto error; + } stack_pointer += 1 + oparg; assert(WITHIN_STACK_BOUNDS()); } @@ -1020,6 +1124,7 @@ TARGET(CALL_ALLOC_AND_ENTER_INIT) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 4; INSTRUCTION_STATS(CALL_ALLOC_AND_ENTER_INIT); static_assert(INLINE_CACHE_ENTRIES_CALL == 3, "incorrect cache size"); @@ -1065,7 +1170,9 @@ self[0] = PyStackRef_FromPyObjectSteal(self_o); _PyStackRef temp = callable[0]; init[0] = PyStackRef_FromPyObjectNew(init_func); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(temp); + stack_pointer = _PyFrame_GetStackPointer(frame); } // _CREATE_INIT_FRAME { @@ -1118,13 +1225,12 @@ TARGET(CALL_BOUND_METHOD_EXACT_ARGS) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 4; INSTRUCTION_STATS(CALL_BOUND_METHOD_EXACT_ARGS); static_assert(INLINE_CACHE_ENTRIES_CALL == 3, "incorrect cache size"); _PyStackRef *callable; _PyStackRef *null; - _PyStackRef *func; - _PyStackRef *self; _PyStackRef *self_or_null; _PyStackRef *args; _PyInterpreterFrame *new_frame; @@ -1142,19 +1248,20 @@ } // _INIT_CALL_BOUND_METHOD_EXACT_ARGS { - func = &stack_pointer[-2 - oparg]; - self = &stack_pointer[-1 - oparg]; + self_or_null = null; + assert(PyStackRef_IsNull(self_or_null[0])); PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); STAT_INC(CALL, hit); - self[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_self); + self_or_null[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_self); _PyStackRef temp = callable[0]; - func[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_func); + callable[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_func); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(temp); + stack_pointer = _PyFrame_GetStackPointer(frame); } // flush // _CHECK_FUNCTION_VERSION { - callable = &stack_pointer[-2 - oparg]; uint32_t func_version = read_u32(&this_instr[2].cache); PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); DEOPT_IF(!PyFunction_Check(callable_o), CALL); @@ -1163,7 +1270,6 @@ } // _CHECK_FUNCTION_EXACT_ARGS { - self_or_null = &stack_pointer[-1 - oparg]; PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); assert(PyFunction_Check(callable_o)); PyFunctionObject *func = (PyFunctionObject *)callable_o; @@ -1221,13 +1327,12 @@ TARGET(CALL_BOUND_METHOD_GENERAL) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 4; INSTRUCTION_STATS(CALL_BOUND_METHOD_GENERAL); static_assert(INLINE_CACHE_ENTRIES_CALL == 3, "incorrect cache size"); _PyStackRef *callable; _PyStackRef *null; - _PyStackRef *method; - _PyStackRef *self; 
_PyStackRef *self_or_null; _PyStackRef *args; _PyInterpreterFrame *new_frame; @@ -1250,23 +1355,22 @@ } // _EXPAND_METHOD { - method = &stack_pointer[-2 - oparg]; - self = &stack_pointer[-1 - oparg]; + self_or_null = null; PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); - assert(PyStackRef_IsNull(null[0])); + assert(PyStackRef_IsNull(self_or_null[0])); assert(Py_TYPE(callable_o) == &PyMethod_Type); - self[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_self); + self_or_null[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_self); _PyStackRef temp = callable[0]; - method[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_func); - assert(PyStackRef_FunctionCheck(method[0])); + callable[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_func); + assert(PyStackRef_FunctionCheck(callable[0])); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(temp); + stack_pointer = _PyFrame_GetStackPointer(frame); } // flush // _PY_FRAME_GENERAL { args = &stack_pointer[-oparg]; - self_or_null = &stack_pointer[-1 - oparg]; - callable = &stack_pointer[-2 - oparg]; PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); // oparg counts all of the args, but *not* self: int total_args = oparg; @@ -1335,37 +1439,36 @@ self_or_null = &stack_pointer[-1 - oparg]; callable = &stack_pointer[-2 - oparg]; PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); + DEOPT_IF(!PyType_Check(callable_o), CALL); + PyTypeObject *tp = (PyTypeObject *)callable_o; int total_args = oparg; + _PyStackRef *arguments = args; if (!PyStackRef_IsNull(self_or_null[0])) { - args--; + arguments--; total_args++; } - DEOPT_IF(!PyType_Check(callable_o), CALL); - PyTypeObject *tp = (PyTypeObject *)callable_o; DEOPT_IF(tp->tp_vectorcall == NULL, CALL); STAT_INC(CALL, hit); - STACKREFS_TO_PYOBJECTS(args, total_args, args_o); + STACKREFS_TO_PYOBJECTS(arguments, total_args, args_o); if (CONVERSION_FAILED(args_o)) { PyStackRef_CLOSE(callable[0]); - PyStackRef_CLOSE(self_or_null[0]); + PyStackRef_XCLOSE(self_or_null[0]); for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(args[_i]); } - { - stack_pointer += -2 - oparg; - assert(WITHIN_STACK_BOUNDS()); - goto error; - } + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; } _PyFrame_SetStackPointer(frame, stack_pointer); PyObject *res_o = tp->tp_vectorcall((PyObject *)tp, args_o, total_args, NULL); stack_pointer = _PyFrame_GetStackPointer(frame); STACKREFS_TO_PYOBJECTS_CLEANUP(args_o); - /* Free the arguments. 
*/ - for (int i = 0; i < total_args; i++) { - PyStackRef_CLOSE(args[i]); - } PyStackRef_CLOSE(callable[0]); + PyStackRef_XCLOSE(self_or_null[0]); + for (int _i = oparg; --_i >= 0;) { + PyStackRef_CLOSE(args[_i]); + } if (res_o == NULL) { stack_pointer += -2 - oparg; assert(WITHIN_STACK_BOUNDS()); @@ -1384,7 +1487,9 @@ _PyFrame_SetStackPointer(frame, stack_pointer); int err = _Py_HandlePending(tstate); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err != 0) goto error; + if (err != 0) { + goto error; + } stack_pointer += 1 + oparg; assert(WITHIN_STACK_BOUNDS()); } @@ -1414,8 +1519,9 @@ /* Builtin METH_FASTCALL functions, without keywords */ PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); int total_args = oparg; + _PyStackRef *arguments = args; if (!PyStackRef_IsNull(self_or_null[0])) { - args--; + arguments--; total_args++; } DEOPT_IF(!PyCFunction_CheckExact(callable_o), CALL); @@ -1423,18 +1529,16 @@ STAT_INC(CALL, hit); PyCFunction cfunc = PyCFunction_GET_FUNCTION(callable_o); /* res = func(self, args, nargs) */ - STACKREFS_TO_PYOBJECTS(args, total_args, args_o); + STACKREFS_TO_PYOBJECTS(arguments, total_args, args_o); if (CONVERSION_FAILED(args_o)) { PyStackRef_CLOSE(callable[0]); - PyStackRef_CLOSE(self_or_null[0]); + PyStackRef_XCLOSE(self_or_null[0]); for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(args[_i]); } - { - stack_pointer += -2 - oparg; - assert(WITHIN_STACK_BOUNDS()); - goto error; - } + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; } _PyFrame_SetStackPointer(frame, stack_pointer); PyObject *res_o = ((PyCFunctionFast)(void(*)(void))cfunc)( @@ -1444,11 +1548,11 @@ stack_pointer = _PyFrame_GetStackPointer(frame); STACKREFS_TO_PYOBJECTS_CLEANUP(args_o); assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); - /* Free the arguments. 
*/ - for (int i = 0; i < total_args; i++) { - PyStackRef_CLOSE(args[i]); - } PyStackRef_CLOSE(callable[0]); + PyStackRef_XCLOSE(self_or_null[0]); + for (int _i = oparg; --_i >= 0;) { + PyStackRef_CLOSE(args[_i]); + } if (res_o == NULL) { stack_pointer += -2 - oparg; assert(WITHIN_STACK_BOUNDS()); @@ -1467,7 +1571,9 @@ _PyFrame_SetStackPointer(frame, stack_pointer); int err = _Py_HandlePending(tstate); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err != 0) goto error; + if (err != 0) { + goto error; + } stack_pointer += 1 + oparg; assert(WITHIN_STACK_BOUNDS()); } @@ -1497,42 +1603,41 @@ /* Builtin METH_FASTCALL | METH_KEYWORDS functions */ PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); int total_args = oparg; + _PyStackRef *arguments = args; if (!PyStackRef_IsNull(self_or_null[0])) { - args--; + arguments--; total_args++; } DEOPT_IF(!PyCFunction_CheckExact(callable_o), CALL); DEOPT_IF(PyCFunction_GET_FLAGS(callable_o) != (METH_FASTCALL | METH_KEYWORDS), CALL); STAT_INC(CALL, hit); - /* res = func(self, args, nargs, kwnames) */ + /* res = func(self, arguments, nargs, kwnames) */ _PyFrame_SetStackPointer(frame, stack_pointer); PyCFunctionFastWithKeywords cfunc = (PyCFunctionFastWithKeywords)(void(*)(void)) PyCFunction_GET_FUNCTION(callable_o); stack_pointer = _PyFrame_GetStackPointer(frame); - STACKREFS_TO_PYOBJECTS(args, total_args, args_o); + STACKREFS_TO_PYOBJECTS(arguments, total_args, args_o); if (CONVERSION_FAILED(args_o)) { PyStackRef_CLOSE(callable[0]); - PyStackRef_CLOSE(self_or_null[0]); + PyStackRef_XCLOSE(self_or_null[0]); for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(args[_i]); } - { - stack_pointer += -2 - oparg; - assert(WITHIN_STACK_BOUNDS()); - goto error; - } + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; } _PyFrame_SetStackPointer(frame, stack_pointer); PyObject *res_o = cfunc(PyCFunction_GET_SELF(callable_o), args_o, total_args, NULL); stack_pointer = _PyFrame_GetStackPointer(frame); STACKREFS_TO_PYOBJECTS_CLEANUP(args_o); assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); - /* Free the arguments. 
*/ - for (int i = 0; i < total_args; i++) { - PyStackRef_CLOSE(args[i]); - } PyStackRef_CLOSE(callable[0]); + PyStackRef_XCLOSE(self_or_null[0]); + for (int _i = oparg; --_i >= 0;) { + PyStackRef_CLOSE(args[_i]); + } if (res_o == NULL) { stack_pointer += -2 - oparg; assert(WITHIN_STACK_BOUNDS()); @@ -1551,7 +1656,9 @@ _PyFrame_SetStackPointer(frame, stack_pointer); int err = _Py_HandlePending(tstate); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err != 0) goto error; + if (err != 0) { + goto error; + } stack_pointer += 1 + oparg; assert(WITHIN_STACK_BOUNDS()); } @@ -1599,11 +1706,15 @@ stack_pointer = _PyFrame_GetStackPointer(frame); _Py_LeaveRecursiveCallTstate(tstate); assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(arg); + stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(callable[0]); + stack_pointer = _PyFrame_GetStackPointer(frame); if (res_o == NULL) { - stack_pointer += -2 - oparg; - assert(WITHIN_STACK_BOUNDS()); goto error; } res = PyStackRef_FromPyObjectSteal(res_o); @@ -1613,19 +1724,21 @@ _Py_CHECK_EMSCRIPTEN_SIGNALS_PERIODICALLY(); QSBR_QUIESCENT_STATE(tstate); if (_Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) & _PY_EVAL_EVENTS_MASK) { - stack_pointer[-2 - oparg] = res; - stack_pointer += -1 - oparg; + stack_pointer[0] = res; + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); _PyFrame_SetStackPointer(frame, stack_pointer); int err = _Py_HandlePending(tstate); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err != 0) goto error; - stack_pointer += 1 + oparg; + if (err != 0) { + goto error; + } + stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); } } - stack_pointer[-2 - oparg] = res; - stack_pointer += -1 - oparg; + stack_pointer[0] = res; + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); DISPATCH(); } @@ -1634,26 +1747,28 @@ frame->instr_ptr = next_instr; next_instr += 1; INSTRUCTION_STATS(CALL_FUNCTION_EX); - PREDICTED(CALL_FUNCTION_EX); + PREDICTED_CALL_FUNCTION_EX:; _Py_CODEUNIT* const this_instr = next_instr - 1; (void)this_instr; _PyStackRef func; _PyStackRef callargs; - _PyStackRef kwargs_in = PyStackRef_NULL; + _PyStackRef kwargs_in; _PyStackRef tuple; - _PyStackRef kwargs_out = PyStackRef_NULL; + _PyStackRef kwargs_out; _PyStackRef func_st; + _PyStackRef null; _PyStackRef callargs_st; - _PyStackRef kwargs_st = PyStackRef_NULL; + _PyStackRef kwargs_st; _PyStackRef result; // _MAKE_CALLARGS_A_TUPLE { - if (oparg & 1) { kwargs_in = stack_pointer[-(oparg & 1)]; } - callargs = stack_pointer[-1 - (oparg & 1)]; - func = stack_pointer[-3 - (oparg & 1)]; + kwargs_in = stack_pointer[-1]; + callargs = stack_pointer[-2]; + func = stack_pointer[-4]; PyObject *callargs_o = PyStackRef_AsPyObjectBorrow(callargs); if (PyTuple_CheckExact(callargs_o)) { tuple = callargs; + kwargs_out = kwargs_in; } else { _PyFrame_SetStackPointer(frame, stack_pointer); @@ -1668,16 +1783,24 @@ if (tuple_o == NULL) { goto error; } + kwargs_out = kwargs_in; + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(callargs); + stack_pointer = _PyFrame_GetStackPointer(frame); tuple = PyStackRef_FromPyObjectSteal(tuple_o); + stack_pointer += 2; + assert(WITHIN_STACK_BOUNDS()); } - kwargs_out = kwargs_in; } // _DO_CALL_FUNCTION_EX { kwargs_st = kwargs_out; callargs_st = tuple; + null = stack_pointer[-3]; 
func_st = func; + (void)null; PyObject *func = PyStackRef_AsPyObjectBorrow(func_st); // DICT_MERGE is called before this opcode if there are kwargs. // It converts all dict subtypes in kwargs into regular dicts. @@ -1691,8 +1814,8 @@ assert(PyTuple_CheckExact(callargs)); PyObject *arg = PyTuple_GET_SIZE(callargs) > 0 ? PyTuple_GET_ITEM(callargs, 0) : &_PyInstrumentation_MISSING; - stack_pointer[-1 - (oparg & 1)] = callargs_st; - if (oparg & 1) stack_pointer[-(oparg & 1)] = kwargs_st; + stack_pointer[-2] = callargs_st; + stack_pointer[-1] = kwargs_st; _PyFrame_SetStackPointer(frame, stack_pointer); int err = _Py_call_instrumentation_2args( tstate, PY_MONITORING_EVENT_CALL, @@ -1735,7 +1858,7 @@ Py_ssize_t nargs = PyTuple_GET_SIZE(callargs); int code_flags = ((PyCodeObject *)PyFunction_GET_CODE(func))->co_flags; PyObject *locals = code_flags & CO_OPTIMIZED ? NULL : Py_NewRef(PyFunction_GET_GLOBALS(func)); - stack_pointer += -2 - (oparg & 1); + stack_pointer += -2; assert(WITHIN_STACK_BOUNDS()); _PyFrame_SetStackPointer(frame, stack_pointer); _PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit_Ex( @@ -1743,7 +1866,7 @@ nargs, callargs, kwargs, frame); stack_pointer = _PyFrame_GetStackPointer(frame); // Need to sync the stack since we exit with DISPATCH_INLINED. - stack_pointer += -1; + stack_pointer += -2; assert(WITHIN_STACK_BOUNDS()); if (new_frame == NULL) { goto error; @@ -1756,20 +1879,28 @@ assert(PyTuple_CheckExact(callargs)); PyObject *kwargs = PyStackRef_AsPyObjectBorrow(kwargs_st); assert(kwargs == NULL || PyDict_CheckExact(kwargs)); - stack_pointer[-1 - (oparg & 1)] = callargs_st; - if (oparg & 1) stack_pointer[-(oparg & 1)] = kwargs_st; + stack_pointer[-2] = callargs_st; + stack_pointer[-1] = kwargs_st; _PyFrame_SetStackPointer(frame, stack_pointer); result_o = PyObject_Call(func, callargs, kwargs); stack_pointer = _PyFrame_GetStackPointer(frame); } + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_XCLOSE(kwargs_st); stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(callargs_st); + stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(func_st); + stack_pointer = _PyFrame_GetStackPointer(frame); if (result_o == NULL) { - stack_pointer += -3 - (oparg & 1); - assert(WITHIN_STACK_BOUNDS()); goto error; } result = PyStackRef_FromPyObjectSteal(result_o); @@ -1779,19 +1910,21 @@ _Py_CHECK_EMSCRIPTEN_SIGNALS_PERIODICALLY(); QSBR_QUIESCENT_STATE(tstate); if (_Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) & _PY_EVAL_EVENTS_MASK) { - stack_pointer[-3 - (oparg & 1)] = result; - stack_pointer += -2 - (oparg & 1); + stack_pointer[0] = result; + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); _PyFrame_SetStackPointer(frame, stack_pointer); int err = _Py_HandlePending(tstate); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err != 0) goto error; - stack_pointer += 2 + (oparg & 1); + if (err != 0) { + goto error; + } + stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); } } - stack_pointer[-3 - (oparg & 1)] = result; - stack_pointer += -2 - (oparg & 1); + stack_pointer[0] = result; + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); DISPATCH(); } @@ -1808,7 +1941,9 @@ PyObject *res_o = _PyIntrinsics_UnaryFunctions[oparg].func(tstate, PyStackRef_AsPyObjectBorrow(value)); 
stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(value); - if (res_o == NULL) goto pop_1_error; + if (res_o == NULL) { + goto pop_1_error; + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-1] = res; DISPATCH(); @@ -1831,7 +1966,9 @@ stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(value2_st); PyStackRef_CLOSE(value1_st); - if (res_o == NULL) goto pop_2_error; + if (res_o == NULL) { + goto pop_2_error; + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-2] = res; stack_pointer += -1; @@ -1856,16 +1993,17 @@ /* isinstance(o, o2) */ PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); int total_args = oparg; + _PyStackRef *arguments = args; if (!PyStackRef_IsNull(self_or_null[0])) { - args--; + arguments--; total_args++; } DEOPT_IF(total_args != 2, CALL); PyInterpreterState *interp = tstate->interp; DEOPT_IF(callable_o != interp->callable_cache.isinstance, CALL); STAT_INC(CALL, hit); - _PyStackRef cls_stackref = args[1]; - _PyStackRef inst_stackref = args[0]; + _PyStackRef cls_stackref = arguments[1]; + _PyStackRef inst_stackref = arguments[0]; _PyFrame_SetStackPointer(frame, stack_pointer); int retval = PyObject_IsInstance(PyStackRef_AsPyObjectBorrow(inst_stackref), PyStackRef_AsPyObjectBorrow(cls_stackref)); stack_pointer = _PyFrame_GetStackPointer(frame); @@ -1874,9 +2012,11 @@ } res = retval ? PyStackRef_True : PyStackRef_False; assert((!PyStackRef_IsNull(res)) ^ (_PyErr_Occurred(tstate) != NULL)); - PyStackRef_CLOSE(inst_stackref); - PyStackRef_CLOSE(cls_stackref); PyStackRef_CLOSE(callable[0]); + PyStackRef_XCLOSE(self_or_null[0]); + for (int _i = oparg; --_i >= 0;) { + PyStackRef_CLOSE(args[_i]); + } stack_pointer[-2 - oparg] = res; stack_pointer += -1 - oparg; assert(WITHIN_STACK_BOUNDS()); @@ -1887,7 +2027,7 @@ frame->instr_ptr = next_instr; next_instr += 4; INSTRUCTION_STATS(CALL_KW); - PREDICTED(CALL_KW); + PREDICTED_CALL_KW:; _Py_CODEUNIT* const this_instr = next_instr - 4; (void)this_instr; _PyStackRef *callable; @@ -1931,7 +2071,9 @@ PyObject *method = ((PyMethodObject *)callable_o)->im_func; _PyStackRef temp = callable[0]; func[0] = PyStackRef_FromPyObjectNew(method); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(temp); + stack_pointer = _PyFrame_GetStackPointer(frame); } kwnames_out = kwnames_in; } @@ -1945,8 +2087,9 @@ PyObject *kwnames_o = PyStackRef_AsPyObjectBorrow(kwnames); // oparg counts all of the args, but *not* self: int total_args = oparg; + _PyStackRef *arguments = args; if (!PyStackRef_IsNull(self_or_null[0])) { - args--; + arguments--; total_args++; } int positional_args = total_args - (int)PyTuple_GET_SIZE(kwnames_o); @@ -1961,12 +2104,16 @@ _PyFrame_SetStackPointer(frame, stack_pointer); _PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit( tstate, callable[0], locals, - args, positional_args, kwnames_o, frame + arguments, positional_args, kwnames_o, frame ); stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(kwnames); + stack_pointer = _PyFrame_GetStackPointer(frame); // Sync stack explicitly since we leave using DISPATCH_INLINED(). - stack_pointer += -3 - oparg; + stack_pointer += -2 - oparg; assert(WITHIN_STACK_BOUNDS()); // The frame has stolen all the arguments from the stack, // so there is no need to clean them up. 
@@ -1978,19 +2125,17 @@ DISPATCH_INLINED(new_frame); } /* Callable is not a normal Python function */ - STACKREFS_TO_PYOBJECTS(args, total_args, args_o); + STACKREFS_TO_PYOBJECTS(arguments, total_args, args_o); if (CONVERSION_FAILED(args_o)) { PyStackRef_CLOSE(callable[0]); - PyStackRef_CLOSE(self_or_null[0]); + PyStackRef_XCLOSE(self_or_null[0]); for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(args[_i]); } PyStackRef_CLOSE(kwnames); - { - stack_pointer += -3 - oparg; - assert(WITHIN_STACK_BOUNDS()); - goto error; - } + stack_pointer += -3 - oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; } stack_pointer[-1] = kwnames; _PyFrame_SetStackPointer(frame, stack_pointer); @@ -2002,7 +2147,7 @@ STACKREFS_TO_PYOBJECTS_CLEANUP(args_o); if (opcode == INSTRUMENTED_CALL_KW) { PyObject *arg = total_args == 0 ? - &_PyInstrumentation_MISSING : PyStackRef_AsPyObjectBorrow(args[0]); + &_PyInstrumentation_MISSING : PyStackRef_AsPyObjectBorrow(arguments[0]); if (res_o == NULL) { _PyFrame_SetStackPointer(frame, stack_pointer); _Py_call_instrumentation_exc2( @@ -2021,12 +2166,12 @@ } } } - PyStackRef_CLOSE(kwnames); - assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); PyStackRef_CLOSE(callable[0]); - for (int i = 0; i < total_args; i++) { - PyStackRef_CLOSE(args[i]); + PyStackRef_XCLOSE(self_or_null[0]); + for (int _i = oparg; --_i >= 0;) { + PyStackRef_CLOSE(args[_i]); } + PyStackRef_CLOSE(kwnames); if (res_o == NULL) { stack_pointer += -3 - oparg; assert(WITHIN_STACK_BOUNDS()); @@ -2042,14 +2187,13 @@ TARGET(CALL_KW_BOUND_METHOD) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 4; INSTRUCTION_STATS(CALL_KW_BOUND_METHOD); static_assert(INLINE_CACHE_ENTRIES_CALL_KW == 3, "incorrect cache size"); _PyStackRef *callable; _PyStackRef *null; _PyStackRef kwnames; - _PyStackRef *method; - _PyStackRef *self; _PyStackRef *self_or_null; _PyStackRef *args; _PyInterpreterFrame *new_frame; @@ -2072,29 +2216,29 @@ } // _EXPAND_METHOD_KW { - method = &stack_pointer[-3 - oparg]; - self = &stack_pointer[-2 - oparg]; + self_or_null = null; + assert(PyStackRef_IsNull(self_or_null[0])); _PyStackRef callable_s = callable[0]; PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable_s); - assert(PyStackRef_IsNull(null[0])); assert(Py_TYPE(callable_o) == &PyMethod_Type); - self[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_self); - method[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_func); - assert(PyStackRef_FunctionCheck(method[0])); + self_or_null[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_self); + callable[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_func); + assert(PyStackRef_FunctionCheck(callable[0])); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(callable_s); + stack_pointer = _PyFrame_GetStackPointer(frame); } // flush // _PY_FRAME_KW { kwnames = stack_pointer[-1]; args = &stack_pointer[-1 - oparg]; - self_or_null = &stack_pointer[-2 - oparg]; - callable = &stack_pointer[-3 - oparg]; PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); // oparg counts all of the args, but *not* self: int total_args = oparg; + _PyStackRef *arguments = args; if (!PyStackRef_IsNull(self_or_null[0])) { - args--; + arguments--; total_args++; } PyObject *kwnames_o = PyStackRef_AsPyObjectBorrow(kwnames); @@ -2105,13 +2249,17 @@ _PyFrame_SetStackPointer(frame, stack_pointer); _PyInterpreterFrame *temp = _PyEvalFramePushAndInit( tstate, callable[0], locals, 
- args, positional_args, kwnames_o, frame + arguments, positional_args, kwnames_o, frame ); stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(kwnames); + stack_pointer = _PyFrame_GetStackPointer(frame); // The frame has stolen all the arguments from the stack, // so there is no need to clean them up. - stack_pointer += -3 - oparg; + stack_pointer += -2 - oparg; assert(WITHIN_STACK_BOUNDS()); if (temp == NULL) { goto error; @@ -2174,24 +2322,23 @@ #endif PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); int total_args = oparg; + _PyStackRef *arguments = args; if (!PyStackRef_IsNull(self_or_null[0])) { - args--; + arguments--; total_args++; } /* Callable is not a normal Python function */ - STACKREFS_TO_PYOBJECTS(args, total_args, args_o); + STACKREFS_TO_PYOBJECTS(arguments, total_args, args_o); if (CONVERSION_FAILED(args_o)) { PyStackRef_CLOSE(callable[0]); - PyStackRef_CLOSE(self_or_null[0]); + PyStackRef_XCLOSE(self_or_null[0]); for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(args[_i]); } PyStackRef_CLOSE(kwnames); - { - stack_pointer += -3 - oparg; - assert(WITHIN_STACK_BOUNDS()); - goto error; - } + stack_pointer += -3 - oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; } PyObject *kwnames_o = PyStackRef_AsPyObjectBorrow(kwnames); int positional_args = total_args - (int)PyTuple_GET_SIZE(kwnames_o); @@ -2201,15 +2348,20 @@ positional_args | PY_VECTORCALL_ARGUMENTS_OFFSET, kwnames_o); stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(kwnames); + stack_pointer = _PyFrame_GetStackPointer(frame); STACKREFS_TO_PYOBJECTS_CLEANUP(args_o); assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); - for (int i = 0; i < total_args; i++) { - PyStackRef_CLOSE(args[i]); - } PyStackRef_CLOSE(callable[0]); + PyStackRef_XCLOSE(self_or_null[0]); + for (int _i = oparg; --_i >= 0;) { + PyStackRef_CLOSE(args[_i]); + } if (res_o == NULL) { - stack_pointer += -3 - oparg; + stack_pointer += -2 - oparg; assert(WITHIN_STACK_BOUNDS()); goto error; } @@ -2220,25 +2372,28 @@ _Py_CHECK_EMSCRIPTEN_SIGNALS_PERIODICALLY(); QSBR_QUIESCENT_STATE(tstate); if (_Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) & _PY_EVAL_EVENTS_MASK) { - stack_pointer[-3 - oparg] = res; - stack_pointer += -2 - oparg; + stack_pointer[-2 - oparg] = res; + stack_pointer += -1 - oparg; assert(WITHIN_STACK_BOUNDS()); _PyFrame_SetStackPointer(frame, stack_pointer); int err = _Py_HandlePending(tstate); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err != 0) goto error; - stack_pointer += 2 + oparg; + if (err != 0) { + goto error; + } + stack_pointer += 1 + oparg; assert(WITHIN_STACK_BOUNDS()); } } - stack_pointer[-3 - oparg] = res; - stack_pointer += -2 - oparg; + stack_pointer[-2 - oparg] = res; + stack_pointer += -1 - oparg; assert(WITHIN_STACK_BOUNDS()); DISPATCH(); } TARGET(CALL_KW_PY) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 4; INSTRUCTION_STATS(CALL_KW_PY); static_assert(INLINE_CACHE_ENTRIES_CALL_KW == 3, "incorrect cache size"); @@ -2269,8 +2424,9 @@ PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); // oparg counts all of the args, but *not* self: int total_args = oparg; + _PyStackRef *arguments = args; if (!PyStackRef_IsNull(self_or_null[0])) { - args--; + arguments--; total_args++; } 
PyObject *kwnames_o = PyStackRef_AsPyObjectBorrow(kwnames); @@ -2281,13 +2437,17 @@ _PyFrame_SetStackPointer(frame, stack_pointer); _PyInterpreterFrame *temp = _PyEvalFramePushAndInit( tstate, callable[0], locals, - args, positional_args, kwnames_o, frame + arguments, positional_args, kwnames_o, frame ); stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(kwnames); + stack_pointer = _PyFrame_GetStackPointer(frame); // The frame has stolen all the arguments from the stack, // so there is no need to clean them up. - stack_pointer += -3 - oparg; + stack_pointer += -2 - oparg; assert(WITHIN_STACK_BOUNDS()); if (temp == NULL) { goto error; @@ -2359,11 +2519,17 @@ if (res_o == NULL) { GOTO_ERROR(error); } - PyStackRef_CLOSE(callable[0]); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(arg_stackref); + stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_CLOSE(callable[0]); + stack_pointer = _PyFrame_GetStackPointer(frame); res = PyStackRef_FromPyObjectSteal(res_o); - stack_pointer[-2 - oparg] = res; - stack_pointer += -1 - oparg; + stack_pointer[0] = res; + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); DISPATCH(); } @@ -2392,17 +2558,25 @@ STAT_INC(CALL, hit); int err = _PyList_AppendTakeRef((PyListObject *)self_o, PyStackRef_AsPyObjectSteal(arg)); UNLOCK_OBJECT(self_o); + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(self); + stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(callable); - if (err) goto pop_3_error; + stack_pointer = _PyFrame_GetStackPointer(frame); + if (err) { + goto error; + } #if TIER_ONE // Skip the following POP_TOP. 
This is done here in tier one, and // during trace projection in tier two: assert(next_instr->op.code == POP_TOP); SKIP_OVER(1); #endif - stack_pointer += -3; - assert(WITHIN_STACK_BOUNDS()); DISPATCH(); } @@ -2424,8 +2598,9 @@ callable = &stack_pointer[-2 - oparg]; PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); int total_args = oparg; + _PyStackRef *arguments = args; if (!PyStackRef_IsNull(self_or_null[0])) { - args--; + arguments--; total_args++; } PyMethodDescrObject *method = (PyMethodDescrObject *)callable_o; @@ -2433,22 +2608,20 @@ DEOPT_IF(!Py_IS_TYPE(method, &PyMethodDescr_Type), CALL); PyMethodDef *meth = method->d_method; DEOPT_IF(meth->ml_flags != METH_FASTCALL, CALL); - PyObject *self = PyStackRef_AsPyObjectBorrow(args[0]); + PyObject *self = PyStackRef_AsPyObjectBorrow(arguments[0]); DEOPT_IF(!Py_IS_TYPE(self, method->d_common.d_type), CALL); STAT_INC(CALL, hit); int nargs = total_args - 1; - STACKREFS_TO_PYOBJECTS(args, total_args, args_o); + STACKREFS_TO_PYOBJECTS(arguments, total_args, args_o); if (CONVERSION_FAILED(args_o)) { PyStackRef_CLOSE(callable[0]); - PyStackRef_CLOSE(self_or_null[0]); + PyStackRef_XCLOSE(self_or_null[0]); for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(args[_i]); } - { - stack_pointer += -2 - oparg; - assert(WITHIN_STACK_BOUNDS()); - goto error; - } + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; } _PyFrame_SetStackPointer(frame, stack_pointer); PyCFunctionFast cfunc = @@ -2457,11 +2630,11 @@ stack_pointer = _PyFrame_GetStackPointer(frame); STACKREFS_TO_PYOBJECTS_CLEANUP(args_o); assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); - /* Clear the stack of the arguments. */ - for (int i = 0; i < total_args; i++) { - PyStackRef_CLOSE(args[i]); - } PyStackRef_CLOSE(callable[0]); + PyStackRef_XCLOSE(self_or_null[0]); + for (int _i = oparg; --_i >= 0;) { + PyStackRef_CLOSE(args[_i]); + } if (res_o == NULL) { stack_pointer += -2 - oparg; assert(WITHIN_STACK_BOUNDS()); @@ -2480,7 +2653,9 @@ _PyFrame_SetStackPointer(frame, stack_pointer); int err = _Py_HandlePending(tstate); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err != 0) goto error; + if (err != 0) { + goto error; + } stack_pointer += 1 + oparg; assert(WITHIN_STACK_BOUNDS()); } @@ -2509,8 +2684,9 @@ callable = &stack_pointer[-2 - oparg]; PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); int total_args = oparg; + _PyStackRef *arguments = args; if (!PyStackRef_IsNull(self_or_null[0])) { - args--; + arguments--; total_args++; } PyMethodDescrObject *method = (PyMethodDescrObject *)callable_o; @@ -2518,22 +2694,20 @@ PyMethodDef *meth = method->d_method; DEOPT_IF(meth->ml_flags != (METH_FASTCALL|METH_KEYWORDS), CALL); PyTypeObject *d_type = method->d_common.d_type; - PyObject *self = PyStackRef_AsPyObjectBorrow(args[0]); + PyObject *self = PyStackRef_AsPyObjectBorrow(arguments[0]); DEOPT_IF(!Py_IS_TYPE(self, d_type), CALL); STAT_INC(CALL, hit); int nargs = total_args - 1; - STACKREFS_TO_PYOBJECTS(args, total_args, args_o); + STACKREFS_TO_PYOBJECTS(arguments, total_args, args_o); if (CONVERSION_FAILED(args_o)) { PyStackRef_CLOSE(callable[0]); - PyStackRef_CLOSE(self_or_null[0]); + PyStackRef_XCLOSE(self_or_null[0]); for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(args[_i]); } - { - stack_pointer += -2 - oparg; - assert(WITHIN_STACK_BOUNDS()); - goto error; - } + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; } _PyFrame_SetStackPointer(frame, stack_pointer); PyCFunctionFastWithKeywords 
cfunc = @@ -2542,11 +2716,11 @@ stack_pointer = _PyFrame_GetStackPointer(frame); STACKREFS_TO_PYOBJECTS_CLEANUP(args_o); assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); - /* Free the arguments. */ - for (int i = 0; i < total_args; i++) { - PyStackRef_CLOSE(args[i]); - } PyStackRef_CLOSE(callable[0]); + PyStackRef_XCLOSE(self_or_null[0]); + for (int _i = oparg; --_i >= 0;) { + PyStackRef_CLOSE(args[_i]); + } if (res_o == NULL) { stack_pointer += -2 - oparg; assert(WITHIN_STACK_BOUNDS()); @@ -2565,7 +2739,9 @@ _PyFrame_SetStackPointer(frame, stack_pointer); int err = _Py_HandlePending(tstate); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err != 0) goto error; + if (err != 0) { + goto error; + } stack_pointer += 1 + oparg; assert(WITHIN_STACK_BOUNDS()); } @@ -2617,11 +2793,15 @@ stack_pointer = _PyFrame_GetStackPointer(frame); _Py_LeaveRecursiveCallTstate(tstate); assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(self_stackref); + stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(callable[0]); + stack_pointer = _PyFrame_GetStackPointer(frame); if (res_o == NULL) { - stack_pointer += -2 - oparg; - assert(WITHIN_STACK_BOUNDS()); goto error; } res = PyStackRef_FromPyObjectSteal(res_o); @@ -2631,19 +2811,21 @@ _Py_CHECK_EMSCRIPTEN_SIGNALS_PERIODICALLY(); QSBR_QUIESCENT_STATE(tstate); if (_Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) & _PY_EVAL_EVENTS_MASK) { - stack_pointer[-2 - oparg] = res; - stack_pointer += -1 - oparg; + stack_pointer[0] = res; + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); _PyFrame_SetStackPointer(frame, stack_pointer); int err = _Py_HandlePending(tstate); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err != 0) goto error; - stack_pointer += 1 + oparg; + if (err != 0) { + goto error; + } + stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); } } - stack_pointer[-2 - oparg] = res; - stack_pointer += -1 - oparg; + stack_pointer[0] = res; + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); DISPATCH(); } @@ -2666,8 +2848,9 @@ callable = &stack_pointer[-2 - oparg]; PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); int total_args = oparg; + _PyStackRef *arguments = args; if (!PyStackRef_IsNull(self_or_null[0])) { - args--; + arguments--; total_args++; } PyMethodDescrObject *method = (PyMethodDescrObject *)callable_o; @@ -2677,8 +2860,8 @@ DEOPT_IF(meth->ml_flags != METH_O, CALL); // CPython promises to check all non-vectorcall function calls. 
DEOPT_IF(tstate->c_recursion_remaining <= 0, CALL); - _PyStackRef arg_stackref = args[1]; - _PyStackRef self_stackref = args[0]; + _PyStackRef arg_stackref = arguments[1]; + _PyStackRef self_stackref = arguments[0]; DEOPT_IF(!Py_IS_TYPE(PyStackRef_AsPyObjectBorrow(self_stackref), method->d_common.d_type), CALL); STAT_INC(CALL, hit); @@ -2691,9 +2874,11 @@ stack_pointer = _PyFrame_GetStackPointer(frame); _Py_LeaveRecursiveCallTstate(tstate); assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); - PyStackRef_CLOSE(self_stackref); - PyStackRef_CLOSE(arg_stackref); PyStackRef_CLOSE(callable[0]); + PyStackRef_XCLOSE(self_or_null[0]); + for (int _i = oparg; --_i >= 0;) { + PyStackRef_CLOSE(args[_i]); + } if (res_o == NULL) { stack_pointer += -2 - oparg; assert(WITHIN_STACK_BOUNDS()); @@ -2712,7 +2897,9 @@ _PyFrame_SetStackPointer(frame, stack_pointer); int err = _Py_HandlePending(tstate); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err != 0) goto error; + if (err != 0) { + goto error; + } stack_pointer += 1 + oparg; assert(WITHIN_STACK_BOUNDS()); } @@ -2750,23 +2937,22 @@ #endif PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); int total_args = oparg; + _PyStackRef *arguments = args; if (!PyStackRef_IsNull(self_or_null[0])) { - args--; + arguments--; total_args++; } /* Callable is not a normal Python function */ - STACKREFS_TO_PYOBJECTS(args, total_args, args_o); + STACKREFS_TO_PYOBJECTS(arguments, total_args, args_o); if (CONVERSION_FAILED(args_o)) { PyStackRef_CLOSE(callable[0]); - PyStackRef_CLOSE(self_or_null[0]); + PyStackRef_XCLOSE(self_or_null[0]); for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(args[_i]); } - { - stack_pointer += -2 - oparg; - assert(WITHIN_STACK_BOUNDS()); - goto error; - } + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; } _PyFrame_SetStackPointer(frame, stack_pointer); PyObject *res_o = PyObject_Vectorcall( @@ -2777,8 +2963,9 @@ STACKREFS_TO_PYOBJECTS_CLEANUP(args_o); assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); PyStackRef_CLOSE(callable[0]); - for (int i = 0; i < total_args; i++) { - PyStackRef_CLOSE(args[i]); + PyStackRef_XCLOSE(self_or_null[0]); + for (int _i = oparg; --_i >= 0;) { + PyStackRef_CLOSE(args[_i]); } if (res_o == NULL) { stack_pointer += -2 - oparg; @@ -2798,7 +2985,9 @@ _PyFrame_SetStackPointer(frame, stack_pointer); int err = _Py_HandlePending(tstate); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err != 0) goto error; + if (err != 0) { + goto error; + } stack_pointer += 1 + oparg; assert(WITHIN_STACK_BOUNDS()); } @@ -2811,6 +3000,7 @@ TARGET(CALL_PY_EXACT_ARGS) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 4; INSTRUCTION_STATS(CALL_PY_EXACT_ARGS); static_assert(INLINE_CACHE_ENTRIES_CALL == 3, "incorrect cache size"); @@ -2892,6 +3082,7 @@ TARGET(CALL_PY_GENERAL) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 4; INSTRUCTION_STATS(CALL_PY_GENERAL); static_assert(INLINE_CACHE_ENTRIES_CALL == 3, "incorrect cache size"); @@ -2993,8 +3184,14 @@ _PyFrame_SetStackPointer(frame, stack_pointer); PyObject *res_o = PyObject_Str(arg_o); stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -3; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(arg); - if (res_o == NULL) goto pop_3_error; + stack_pointer = _PyFrame_GetStackPointer(frame); + if (res_o == NULL) { + goto error; + } res = 
PyStackRef_FromPyObjectSteal(res_o); } // _CHECK_PERIODIC @@ -3002,19 +3199,21 @@ _Py_CHECK_EMSCRIPTEN_SIGNALS_PERIODICALLY(); QSBR_QUIESCENT_STATE(tstate); if (_Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) & _PY_EVAL_EVENTS_MASK) { - stack_pointer[-3] = res; - stack_pointer += -2; + stack_pointer[0] = res; + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); _PyFrame_SetStackPointer(frame, stack_pointer); int err = _Py_HandlePending(tstate); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err != 0) goto error; - stack_pointer += 2; + if (err != 0) { + goto error; + } + stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); } } - stack_pointer[-3] = res; - stack_pointer += -2; + stack_pointer[0] = res; + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); DISPATCH(); } @@ -3044,8 +3243,14 @@ _PyFrame_SetStackPointer(frame, stack_pointer); PyObject *res_o = PySequence_Tuple(arg_o); stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -3; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(arg); - if (res_o == NULL) goto pop_3_error; + stack_pointer = _PyFrame_GetStackPointer(frame); + if (res_o == NULL) { + goto error; + } res = PyStackRef_FromPyObjectSteal(res_o); } // _CHECK_PERIODIC @@ -3053,19 +3258,21 @@ _Py_CHECK_EMSCRIPTEN_SIGNALS_PERIODICALLY(); QSBR_QUIESCENT_STATE(tstate); if (_Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) & _PY_EVAL_EVENTS_MASK) { - stack_pointer[-3] = res; - stack_pointer += -2; + stack_pointer[0] = res; + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); _PyFrame_SetStackPointer(frame, stack_pointer); int err = _Py_HandlePending(tstate); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err != 0) goto error; - stack_pointer += 2; + if (err != 0) { + goto error; + } + stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); } } - stack_pointer[-3] = res; - stack_pointer += -2; + stack_pointer[0] = res; + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); DISPATCH(); } @@ -3091,10 +3298,12 @@ DEOPT_IF(callable_o != (PyObject *)&PyType_Type, CALL); STAT_INC(CALL, hit); res = PyStackRef_FromPyObjectSteal(Py_NewRef(Py_TYPE(arg_o))); - PyStackRef_CLOSE(arg); stack_pointer[-3] = res; stack_pointer += -2; assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_CLOSE(arg); + stack_pointer = _PyFrame_GetStackPointer(frame); DISPATCH(); } @@ -3121,14 +3330,18 @@ PyObject *match_o = NULL; PyObject *rest_o = NULL; _PyFrame_SetStackPointer(frame, stack_pointer); - int res = _PyEval_ExceptionGroupMatch(exc_value, match_type, + int res = _PyEval_ExceptionGroupMatch(frame, exc_value, match_type, &match_o, &rest_o); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(exc_value_st); PyStackRef_CLOSE(match_type_st); - if (res < 0) goto pop_2_error; + if (res < 0) { + goto pop_2_error; + } assert((match_o == NULL) == (rest_o == NULL)); - if (match_o == NULL) goto pop_2_error; + if (match_o == NULL) { + goto pop_2_error; + } if (!Py_IsNone(match_o)) { stack_pointer += -2; assert(WITHIN_STACK_BOUNDS()); @@ -3217,7 +3430,7 @@ frame->instr_ptr = next_instr; next_instr += 2; INSTRUCTION_STATS(COMPARE_OP); - PREDICTED(COMPARE_OP); + PREDICTED_COMPARE_OP:; _Py_CODEUNIT* const this_instr = next_instr - 2; (void)this_instr; _PyStackRef left; @@ -3229,7 +3442,7 @@ left = stack_pointer[-2]; uint16_t counter = read_u16(&this_instr[1].cache); (void)counter; - #if ENABLE_SPECIALIZATION + #if ENABLE_SPECIALIZATION_FT if (ADAPTIVE_COUNTER_TRIGGERS(counter)) { 
next_instr = this_instr; _PyFrame_SetStackPointer(frame, stack_pointer); @@ -3239,7 +3452,7 @@ } OPCODE_DEFERRED_INC(COMPARE_OP); ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter); - #endif /* ENABLE_SPECIALIZATION */ + #endif /* ENABLE_SPECIALIZATION_FT */ } // _COMPARE_OP { @@ -3251,15 +3464,19 @@ stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(left); PyStackRef_CLOSE(right); - if (res_o == NULL) goto pop_2_error; + if (res_o == NULL) { + goto pop_2_error; + } if (oparg & 16) { stack_pointer += -2; assert(WITHIN_STACK_BOUNDS()); _PyFrame_SetStackPointer(frame, stack_pointer); int res_bool = PyObject_IsTrue(res_o); - stack_pointer = _PyFrame_GetStackPointer(frame); Py_DECREF(res_o); - if (res_bool < 0) goto error; + stack_pointer = _PyFrame_GetStackPointer(frame); + if (res_bool < 0) { + goto error; + } res = res_bool ? PyStackRef_True : PyStackRef_False; } else { @@ -3397,7 +3614,7 @@ frame->instr_ptr = next_instr; next_instr += 2; INSTRUCTION_STATS(CONTAINS_OP); - PREDICTED(CONTAINS_OP); + PREDICTED_CONTAINS_OP:; _Py_CODEUNIT* const this_instr = next_instr - 2; (void)this_instr; _PyStackRef left; @@ -3430,7 +3647,9 @@ stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(left); PyStackRef_CLOSE(right); - if (res < 0) goto pop_2_error; + if (res < 0) { + goto pop_2_error; + } b = (res ^ oparg) ? PyStackRef_True : PyStackRef_False; } stack_pointer[-2] = b; @@ -3459,7 +3678,9 @@ stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(left); PyStackRef_CLOSE(right); - if (res < 0) goto pop_2_error; + if (res < 0) { + goto pop_2_error; + } b = (res ^ oparg) ? PyStackRef_True : PyStackRef_False; stack_pointer[-2] = b; stack_pointer += -1; @@ -3488,7 +3709,9 @@ stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(left); PyStackRef_CLOSE(right); - if (res < 0) goto pop_2_error; + if (res < 0) { + goto pop_2_error; + } b = (res ^ oparg) ? 
PyStackRef_True : PyStackRef_False; stack_pointer[-2] = b; stack_pointer += -1; @@ -3509,10 +3732,18 @@ _PyFrame_SetStackPointer(frame, stack_pointer); PyObject *result_o = conv_fn(PyStackRef_AsPyObjectBorrow(value)); stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(value); - if (result_o == NULL) goto pop_1_error; + stack_pointer = _PyFrame_GetStackPointer(frame); + if (result_o == NULL) { + goto error; + } result = PyStackRef_FromPyObjectSteal(result_o); - stack_pointer[-1] = result; + stack_pointer[0] = result; + stack_pointer += 1; + assert(WITHIN_STACK_BOUNDS()); DISPATCH(); } @@ -3560,7 +3791,9 @@ int err = PyObject_DelAttr(PyStackRef_AsPyObjectBorrow(owner), name); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(owner); - if (err) goto pop_1_error; + if (err) { + goto pop_1_error; + } stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); DISPATCH(); @@ -3580,7 +3813,9 @@ stack_pointer = _PyFrame_GetStackPointer(frame); goto error; } + _PyFrame_SetStackPointer(frame, stack_pointer); Py_DECREF(oldobj); + stack_pointer = _PyFrame_GetStackPointer(frame); DISPATCH(); } @@ -3668,7 +3903,9 @@ stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(container); PyStackRef_CLOSE(sub); - if (err) goto pop_2_error; + if (err) { + goto pop_2_error; + } stack_pointer += -2; assert(WITHIN_STACK_BOUNDS()); DISPATCH(); @@ -3768,14 +4005,20 @@ } TARGET(END_FOR) { - frame->instr_ptr = next_instr; next_instr += 1; INSTRUCTION_STATS(END_FOR); _PyStackRef value; value = stack_pointer[-1]; - PyStackRef_CLOSE(value); + /* Don't update instr_ptr, so that POP_ITER sees + * the FOR_ITER as the previous instruction. + * This has the benign side effect that if value is + * finalized it will see the location as the FOR_ITER's. 
+ */ stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_CLOSE(value); + stack_pointer = _PyFrame_GetStackPointer(frame); DISPATCH(); } @@ -3875,14 +4118,24 @@ _PyFrame_SetStackPointer(frame, stack_pointer); PyObject *res_o = PyObject_Format(value_o, NULL); stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(value); - if (res_o == NULL) goto pop_1_error; + stack_pointer = _PyFrame_GetStackPointer(frame); + if (res_o == NULL) { + goto error; + } res = PyStackRef_FromPyObjectSteal(res_o); } else { res = value; + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); } - stack_pointer[-1] = res; + stack_pointer[0] = res; + stack_pointer += 1; + assert(WITHIN_STACK_BOUNDS()); DISPATCH(); } @@ -3900,7 +4153,9 @@ stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(value); PyStackRef_CLOSE(fmt_spec); - if (res_o == NULL) goto pop_2_error; + if (res_o == NULL) { + goto pop_2_error; + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-2] = res; stack_pointer += -1; @@ -3912,7 +4167,7 @@ frame->instr_ptr = next_instr; next_instr += 2; INSTRUCTION_STATS(FOR_ITER); - PREDICTED(FOR_ITER); + PREDICTED_FOR_ITER:; _Py_CODEUNIT* const this_instr = next_instr - 2; (void)this_instr; _PyStackRef iter; @@ -3957,10 +4212,8 @@ /* iterator ended normally */ assert(next_instr[oparg].op.code == END_FOR || next_instr[oparg].op.code == INSTRUMENTED_END_FOR); - PyStackRef_CLOSE(iter); - STACK_SHRINK(1); - /* Jump forward oparg, then skip following END_FOR and POP_TOP instruction */ - JUMPBY(oparg + 2); + /* Jump forward oparg, then skip following END_FOR */ + JUMPBY(oparg + 1); DISPATCH(); } next = PyStackRef_FromPyObjectSteal(next_o); @@ -4045,13 +4298,13 @@ #ifndef Py_GIL_DISABLED if (seq != NULL) { it->it_seq = NULL; + _PyFrame_SetStackPointer(frame, stack_pointer); Py_DECREF(seq); + stack_pointer = _PyFrame_GetStackPointer(frame); } #endif - PyStackRef_CLOSE(iter); - STACK_SHRINK(1); - /* Jump forward oparg, then skip following END_FOR and POP_TOP instructions */ - JUMPBY(oparg + 2); + /* Jump forward oparg, then skip following END_FOR instruction */ + JUMPBY(oparg + 1); DISPATCH(); } } @@ -4091,10 +4344,8 @@ assert(Py_TYPE(r) == &PyRangeIter_Type); STAT_INC(FOR_ITER, hit); if (r->len <= 0) { - STACK_SHRINK(1); - PyStackRef_CLOSE(iter); - // Jump over END_FOR and POP_TOP instructions. - JUMPBY(oparg + 2); + // Jump over END_FOR instruction. 
+ JUMPBY(oparg + 1); DISPATCH(); } } @@ -4107,7 +4358,9 @@ r->start = value + r->step; r->len--; PyObject *res = PyLong_FromLong(value); - if (res == NULL) goto error; + if (res == NULL) { + goto error; + } next = PyStackRef_FromPyObjectSteal(res); } stack_pointer[0] = next; @@ -4139,12 +4392,12 @@ if (seq == NULL || it->it_index >= PyTuple_GET_SIZE(seq)) { if (seq != NULL) { it->it_seq = NULL; + _PyFrame_SetStackPointer(frame, stack_pointer); Py_DECREF(seq); + stack_pointer = _PyFrame_GetStackPointer(frame); } - PyStackRef_CLOSE(iter); - STACK_SHRINK(1); - /* Jump forward oparg, then skip following END_FOR and POP_TOP instructions */ - JUMPBY(oparg + 2); + /* Jump forward oparg, then skip following END_FOR instruction */ + JUMPBY(oparg + 1); DISPATCH(); } } @@ -4192,7 +4445,9 @@ iter_o = (*getter)(obj_o); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(obj); - if (iter_o == NULL) goto pop_1_error; + if (iter_o == NULL) { + goto pop_1_error; + } if (Py_TYPE(iter_o)->tp_as_async == NULL || Py_TYPE(iter_o)->tp_as_async->am_anext == NULL) { stack_pointer += -1; @@ -4202,8 +4457,8 @@ "'async for' received an object from __aiter__ " "that does not implement __anext__: %.100s", Py_TYPE(iter_o)->tp_name); - stack_pointer = _PyFrame_GetStackPointer(frame); Py_DECREF(iter_o); + stack_pointer = _PyFrame_GetStackPointer(frame); goto error; } iter = PyStackRef_FromPyObjectSteal(iter_o); @@ -4242,7 +4497,9 @@ PyObject *iter_o = _PyEval_GetAwaitable(PyStackRef_AsPyObjectBorrow(iterable), oparg); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(iterable); - if (iter_o == NULL) goto pop_1_error; + if (iter_o == NULL) { + goto pop_1_error; + } iter = PyStackRef_FromPyObjectSteal(iter_o); stack_pointer[-1] = iter; DISPATCH(); @@ -4260,7 +4517,9 @@ PyObject *iter_o = PyObject_GetIter(PyStackRef_AsPyObjectBorrow(iterable)); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(iterable); - if (iter_o == NULL) goto pop_1_error; + if (iter_o == NULL) { + goto pop_1_error; + } iter = PyStackRef_FromPyObjectSteal(iter_o); stack_pointer[-1] = iter; DISPATCH(); @@ -4277,9 +4536,13 @@ _PyFrame_SetStackPointer(frame, stack_pointer); Py_ssize_t len_i = PyObject_Length(PyStackRef_AsPyObjectBorrow(obj)); stack_pointer = _PyFrame_GetStackPointer(frame); - if (len_i < 0) goto error; + if (len_i < 0) { + goto error; + } PyObject *len_o = PyLong_FromSsize_t(len_i); - if (len_o == NULL) goto error; + if (len_o == NULL) { + goto error; + } len = PyStackRef_FromPyObjectSteal(len_o); stack_pointer[0] = len; stack_pointer += 1; @@ -4341,7 +4604,9 @@ _PyFrame_SetStackPointer(frame, stack_pointer); PyObject *res_o = _PyEval_ImportFrom(tstate, PyStackRef_AsPyObjectBorrow(from), name); stack_pointer = _PyFrame_GetStackPointer(frame); - if (res_o == NULL) goto error; + if (res_o == NULL) { + goto error; + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[0] = res; stack_pointer += 1; @@ -4366,7 +4631,9 @@ stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(level); PyStackRef_CLOSE(fromlist); - if (res_o == NULL) goto pop_2_error; + if (res_o == NULL) { + goto pop_2_error; + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-2] = res; stack_pointer += -1; @@ -4400,7 +4667,9 @@ PyObject *method = ((PyMethodObject *)callable_o)->im_func; _PyStackRef temp = callable[0]; func[0] = PyStackRef_FromPyObjectNew(method); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(temp); + stack_pointer = _PyFrame_GetStackPointer(frame); } } // _MONITOR_CALL 
@@ -4428,7 +4697,9 @@ frame, this_instr, function, arg0 ); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err) goto error; + if (err) { + goto error; + } } // _DO_CALL { @@ -4437,8 +4708,9 @@ PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); // oparg counts all of the args, but *not* self: int total_args = oparg; + _PyStackRef *arguments = args; if (!PyStackRef_IsNull(self_or_null[0])) { - args--; + arguments--; total_args++; } // Check if the call can be inlined or not @@ -4451,7 +4723,7 @@ _PyFrame_SetStackPointer(frame, stack_pointer); _PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit( tstate, callable[0], locals, - args, total_args, NULL, frame + arguments, total_args, NULL, frame ); stack_pointer = _PyFrame_GetStackPointer(frame); // Manipulate stack directly since we leave using DISPATCH_INLINED(). @@ -4466,17 +4738,16 @@ DISPATCH_INLINED(new_frame); } /* Callable is not a normal Python function */ - STACKREFS_TO_PYOBJECTS(args, total_args, args_o); + STACKREFS_TO_PYOBJECTS(arguments, total_args, args_o); if (CONVERSION_FAILED(args_o)) { PyStackRef_CLOSE(callable[0]); - for (int i = 0; i < total_args; i++) { - PyStackRef_CLOSE(args[i]); - } - { - stack_pointer += -2 - oparg; - assert(WITHIN_STACK_BOUNDS()); - goto error; + PyStackRef_XCLOSE(self_or_null[0]); + for (int _i = oparg; --_i >= 0;) { + PyStackRef_CLOSE(args[_i]); } + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; } _PyFrame_SetStackPointer(frame, stack_pointer); PyObject *res_o = PyObject_Vectorcall( @@ -4487,7 +4758,7 @@ STACKREFS_TO_PYOBJECTS_CLEANUP(args_o); if (opcode == INSTRUMENTED_CALL) { PyObject *arg = total_args == 0 ? - &_PyInstrumentation_MISSING : PyStackRef_AsPyObjectBorrow(args[0]); + &_PyInstrumentation_MISSING : PyStackRef_AsPyObjectBorrow(arguments[0]); if (res_o == NULL) { _PyFrame_SetStackPointer(frame, stack_pointer); _Py_call_instrumentation_exc2( @@ -4508,8 +4779,9 @@ } assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); PyStackRef_CLOSE(callable[0]); - for (int i = 0; i < total_args; i++) { - PyStackRef_CLOSE(args[i]); + PyStackRef_XCLOSE(self_or_null[0]); + for (int _i = oparg; --_i >= 0;) { + PyStackRef_CLOSE(args[_i]); } if (res_o == NULL) { stack_pointer += -2 - oparg; @@ -4529,7 +4801,9 @@ _PyFrame_SetStackPointer(frame, stack_pointer); int err = _Py_HandlePending(tstate); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err != 0) goto error; + if (err != 0) { + goto error; + } stack_pointer += 1 + oparg; assert(WITHIN_STACK_BOUNDS()); } @@ -4544,7 +4818,8 @@ frame->instr_ptr = next_instr; next_instr += 1; INSTRUCTION_STATS(INSTRUMENTED_CALL_FUNCTION_EX); - GO_TO_INSTRUCTION(CALL_FUNCTION_EX); + + goto PREDICTED_CALL_FUNCTION_EX; } TARGET(INSTRUMENTED_CALL_KW) { @@ -4566,13 +4841,15 @@ tstate, PY_MONITORING_EVENT_CALL, frame, this_instr, function, arg); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err) goto error; + if (err) { + goto error; + } PAUSE_ADAPTIVE_COUNTER(this_instr[1].counter); - GO_TO_INSTRUCTION(CALL_KW); + goto PREDICTED_CALL_KW; } TARGET(INSTRUMENTED_END_FOR) { - _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + _Py_CODEUNIT* const this_instr = next_instr; (void)this_instr; next_instr += 1; INSTRUCTION_STATS(INSTRUMENTED_END_FOR); @@ -4616,10 +4893,12 @@ } } val = value; - PyStackRef_CLOSE(receiver); stack_pointer[-2] = val; stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_CLOSE(receiver); + stack_pointer = 
_PyFrame_GetStackPointer(frame); DISPATCH(); } @@ -4636,6 +4915,7 @@ stack_pointer = _PyFrame_GetStackPointer(frame); if (next != NULL) { PUSH(PyStackRef_FromPyObjectSteal(next)); + INSTRUMENTED_JUMP(this_instr, next_instr, PY_MONITORING_EVENT_BRANCH_LEFT); } else { if (_PyErr_Occurred(tstate)) { @@ -4653,11 +4933,8 @@ /* iterator ended normally */ assert(next_instr[oparg].op.code == END_FOR || next_instr[oparg].op.code == INSTRUMENTED_END_FOR); - STACK_SHRINK(1); - PyStackRef_CLOSE(iter_stackref); - /* Skip END_FOR and POP_TOP */ - _Py_CODEUNIT *target = next_instr + oparg + 2; - INSTRUMENTED_JUMP(this_instr, target, PY_MONITORING_EVENT_BRANCH_RIGHT); + /* Skip END_FOR */ + JUMPBY(oparg + 1); } DISPATCH(); } @@ -4671,7 +4948,9 @@ int next_opcode = _Py_call_instrumentation_instruction( tstate, frame, this_instr); stack_pointer = _PyFrame_GetStackPointer(frame); - if (next_opcode < 0) goto error; + if (next_opcode < 0) { + goto error; + } next_instr = this_instr; if (_PyOpcode_Caches[next_opcode]) { PAUSE_ADAPTIVE_COUNTER(next_instr[1].counter); @@ -4695,7 +4974,9 @@ _PyFrame_SetStackPointer(frame, stack_pointer); int err = _Py_HandlePending(tstate); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err != 0) goto error; + if (err != 0) { + goto error; + } } } // _MONITOR_JUMP_BACKWARD @@ -4724,8 +5005,9 @@ if (tstate->tracing) { PyCodeObject *code = _PyFrame_GetCode(frame); _PyFrame_SetStackPointer(frame, stack_pointer); - original_opcode = code->_co_monitoring->lines[(int)(this_instr - _PyFrame_GetBytecode(frame))].original_opcode; + int index = (int)(this_instr - _PyFrame_GetBytecode(frame)); stack_pointer = _PyFrame_GetStackPointer(frame); + original_opcode = code->_co_monitoring->lines->data[index*code->_co_monitoring->lines->bytes_per_entry]; next_instr = this_instr; } else { _PyFrame_SetStackPointer(frame, stack_pointer); @@ -4760,15 +5042,34 @@ // cancel out the decrement that will happen in LOAD_SUPER_ATTR; we // don't want to specialize instrumented instructions PAUSE_ADAPTIVE_COUNTER(this_instr[1].counter); - GO_TO_INSTRUCTION(LOAD_SUPER_ATTR); + goto PREDICTED_LOAD_SUPER_ATTR; } TARGET(INSTRUMENTED_NOT_TAKEN) { + _Py_CODEUNIT* const prev_instr = frame->instr_ptr; _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; (void)this_instr; next_instr += 1; INSTRUCTION_STATS(INSTRUMENTED_NOT_TAKEN); - INSTRUMENTED_JUMP(this_instr, next_instr, PY_MONITORING_EVENT_BRANCH_LEFT); + (void)this_instr; // INSTRUMENTED_JUMP requires this_instr + INSTRUMENTED_JUMP(prev_instr, next_instr, PY_MONITORING_EVENT_BRANCH_LEFT); + DISPATCH(); + } + + TARGET(INSTRUMENTED_POP_ITER) { + _Py_CODEUNIT* const prev_instr = frame->instr_ptr; + _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; + next_instr += 1; + INSTRUCTION_STATS(INSTRUMENTED_POP_ITER); + _PyStackRef iter; + iter = stack_pointer[-1]; + INSTRUMENTED_JUMP(prev_instr, this_instr+1, PY_MONITORING_EVENT_BRANCH_RIGHT); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_CLOSE(iter); + stack_pointer = _PyFrame_GetStackPointer(frame); DISPATCH(); } @@ -4801,7 +5102,9 @@ INSTRUMENTED_JUMP(this_instr, next_instr + oparg, PY_MONITORING_EVENT_BRANCH_RIGHT); } else { + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(value_stackref); + stack_pointer = _PyFrame_GetStackPointer(frame); } DISPATCH(); } @@ -4816,7 +5119,9 @@ int jump = !PyStackRef_IsNone(value_stackref); RECORD_BRANCH_TAKEN(this_instr[1].cache, jump); if (jump) { 
+ _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(value_stackref); + stack_pointer = _PyFrame_GetStackPointer(frame); INSTRUMENTED_JUMP(this_instr, next_instr + oparg, PY_MONITORING_EVENT_BRANCH_RIGHT); } DISPATCH(); @@ -4852,7 +5157,9 @@ _Py_CODEUNIT *bytecode = _PyEval_GetExecutableCode(tstate, _PyFrame_GetCode(frame)); stack_pointer = _PyFrame_GetStackPointer(frame); - if (bytecode == NULL) goto error; + if (bytecode == NULL) { + goto error; + } _PyFrame_SetStackPointer(frame, stack_pointer); ptrdiff_t off = this_instr - _PyFrame_GetBytecode(frame); stack_pointer = _PyFrame_GetStackPointer(frame); @@ -4891,7 +5198,9 @@ _PyFrame_SetStackPointer(frame, stack_pointer); int err = _Py_HandlePending(tstate); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err != 0) goto error; + if (err != 0) { + goto error; + } } } } @@ -4901,7 +5210,9 @@ int err = _Py_call_instrumentation( tstate, oparg > 0, frame, this_instr); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err) goto error; + if (err) { + goto error; + } if (frame->instr_ptr != this_instr) { /* Instrumentation has jumped */ next_instr = frame->instr_ptr; @@ -4926,14 +5237,14 @@ tstate, PY_MONITORING_EVENT_PY_RETURN, frame, this_instr, PyStackRef_AsPyObjectBorrow(val)); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err) goto error; + if (err) { + goto error; + } } // _RETURN_VALUE { retval = val; - #if TIER_ONE - assert(frame != &entry_frame); - #endif + assert(frame->owner != FRAME_OWNED_BY_INTERPRETER); _PyStackRef temp = retval; stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); @@ -4985,9 +5296,7 @@ // NOTE: It's important that YIELD_VALUE never raises an exception! // The compiler treats any exception raised here as a failed close() // or throw() call. - #if TIER_ONE - assert(frame != &entry_frame); - #endif + assert(frame->owner != FRAME_OWNED_BY_INTERPRETER); frame->instr_ptr++; PyGenObject *gen = _PyGen_GetGeneratorFromFrame(frame); assert(FRAME_SUSPENDED_YIELD_FROM == FRAME_SUSPENDED + 1); @@ -5030,7 +5339,7 @@ INSTRUCTION_STATS(INTERPRETER_EXIT); _PyStackRef retval; retval = stack_pointer[-1]; - assert(frame == &entry_frame); + assert(frame->owner == FRAME_OWNED_BY_INTERPRETER); assert(_PyFrame_IsIncomplete(frame)); /* Restore previous frame and return. */ tstate->current_frame = frame->previous; @@ -5063,10 +5372,57 @@ } TARGET(JUMP_BACKWARD) { + frame->instr_ptr = next_instr; + next_instr += 2; + INSTRUCTION_STATS(JUMP_BACKWARD); + PREDICTED_JUMP_BACKWARD:; + _Py_CODEUNIT* const this_instr = next_instr - 2; + (void)this_instr; + /* Skip 1 cache entry */ + // _SPECIALIZE_JUMP_BACKWARD + { + #if ENABLE_SPECIALIZATION + if (this_instr->op.code == JUMP_BACKWARD) { + this_instr->op.code = tstate->interp->jit ? JUMP_BACKWARD_JIT : JUMP_BACKWARD_NO_JIT; + // Need to re-dispatch so the warmup counter isn't off by one: + next_instr = this_instr; + DISPATCH_SAME_OPARG(); + } + #endif + } + // _CHECK_PERIODIC + { + _Py_CHECK_EMSCRIPTEN_SIGNALS_PERIODICALLY(); + QSBR_QUIESCENT_STATE(tstate); + if (_Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) & _PY_EVAL_EVENTS_MASK) { + _PyFrame_SetStackPointer(frame, stack_pointer); + int err = _Py_HandlePending(tstate); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (err != 0) { + goto error; + } + } + } + // _JUMP_BACKWARD_NO_INTERRUPT + { + /* This bytecode is used in the `yield from` or `await` loop. + * If there is an interrupt, we want it handled in the innermost + * generator or coroutine, so we deliberately do not check it here. 
+ * (see bpo-30039). + */ + assert(oparg <= INSTR_OFFSET()); + JUMPBY(-oparg); + } + DISPATCH(); + } + + TARGET(JUMP_BACKWARD_JIT) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; (void)this_instr; next_instr += 2; - INSTRUCTION_STATS(JUMP_BACKWARD); + INSTRUCTION_STATS(JUMP_BACKWARD_JIT); + static_assert(1 == 1, "incorrect cache size"); + /* Skip 1 cache entry */ // _CHECK_PERIODIC { _Py_CHECK_EMSCRIPTEN_SIGNALS_PERIODICALLY(); @@ -5075,19 +5431,26 @@ _PyFrame_SetStackPointer(frame, stack_pointer); int err = _Py_HandlePending(tstate); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err != 0) goto error; + if (err != 0) { + goto error; + } } } - // _JUMP_BACKWARD + // _JUMP_BACKWARD_NO_INTERRUPT { - uint16_t the_counter = read_u16(&this_instr[1].cache); - (void)the_counter; + /* This bytecode is used in the `yield from` or `await` loop. + * If there is an interrupt, we want it handled in the innermost + * generator or coroutine, so we deliberately do not check it here. + * (see bpo-30039). + */ assert(oparg <= INSTR_OFFSET()); JUMPBY(-oparg); + } + // _JIT + { #ifdef _Py_TIER2 - #if ENABLE_SPECIALIZATION _Py_BackoffCounter counter = this_instr[1].counter; - if (backoff_counter_triggers(counter) && this_instr->op.code == JUMP_BACKWARD) { + if (backoff_counter_triggers(counter) && this_instr->op.code == JUMP_BACKWARD_JIT) { _Py_CODEUNIT *start = this_instr; /* Back up over EXTENDED_ARGs so optimizer sees the whole instruction */ while (oparg > 255) { @@ -5100,7 +5463,9 @@ stack_pointer = _PyFrame_GetStackPointer(frame); if (optimized <= 0) { this_instr[1].counter = restart_backoff_counter(counter); - if (optimized < 0) goto error; + if (optimized < 0) { + goto error; + } } else { _PyFrame_SetStackPointer(frame, stack_pointer); @@ -5114,8 +5479,7 @@ else { ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter); } - #endif /* ENABLE_SPECIALIZATION */ - #endif /* _Py_TIER2 */ + #endif } DISPATCH(); } @@ -5129,10 +5493,43 @@ * generator or coroutine, so we deliberately do not check it here. * (see bpo-30039). */ + assert(oparg <= INSTR_OFFSET()); JUMPBY(-oparg); DISPATCH(); } + TARGET(JUMP_BACKWARD_NO_JIT) { + frame->instr_ptr = next_instr; + next_instr += 2; + INSTRUCTION_STATS(JUMP_BACKWARD_NO_JIT); + static_assert(1 == 1, "incorrect cache size"); + /* Skip 1 cache entry */ + // _CHECK_PERIODIC + { + _Py_CHECK_EMSCRIPTEN_SIGNALS_PERIODICALLY(); + QSBR_QUIESCENT_STATE(tstate); + if (_Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) & _PY_EVAL_EVENTS_MASK) { + _PyFrame_SetStackPointer(frame, stack_pointer); + int err = _Py_HandlePending(tstate); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (err != 0) { + goto error; + } + } + } + // _JUMP_BACKWARD_NO_INTERRUPT + { + /* This bytecode is used in the `yield from` or `await` loop. + * If there is an interrupt, we want it handled in the innermost + * generator or coroutine, so we deliberately do not check it here. + * (see bpo-30039). 
+ */ + assert(oparg <= INSTR_OFFSET()); + JUMPBY(-oparg); + } + DISPATCH(); + } + TARGET(JUMP_FORWARD) { frame->instr_ptr = next_instr; next_instr += 1; @@ -5151,7 +5548,9 @@ list = stack_pointer[-2 - (oparg-1)]; int err = _PyList_AppendTakeRef((PyListObject *)PyStackRef_AsPyObjectBorrow(list), PyStackRef_AsPyObjectSteal(v)); - if (err < 0) goto pop_1_error; + if (err < 0) { + goto pop_1_error; + } stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); DISPATCH(); @@ -5198,12 +5597,12 @@ frame->instr_ptr = next_instr; next_instr += 10; INSTRUCTION_STATS(LOAD_ATTR); - PREDICTED(LOAD_ATTR); + PREDICTED_LOAD_ATTR:; _Py_CODEUNIT* const this_instr = next_instr - 10; (void)this_instr; _PyStackRef owner; _PyStackRef attr; - _PyStackRef self_or_null = PyStackRef_NULL; + _PyStackRef *self_or_null; // _SPECIALIZE_LOAD_ATTR { owner = stack_pointer[-1]; @@ -5225,6 +5624,7 @@ /* Skip 8 cache entries */ // _LOAD_ATTR { + self_or_null = &stack_pointer[0]; PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 1); PyObject *attr_o; if (oparg & 1) { @@ -5239,7 +5639,7 @@ meth | self | arg1 | ... | argN */ assert(attr_o != NULL); // No errors on this branch - self_or_null = owner; // Transfer ownership + self_or_null[0] = owner; // Transfer ownership } else { /* meth is not an unbound method (but a regular attr, or @@ -5249,8 +5649,10 @@ meth | NULL | arg1 | ... | argN */ PyStackRef_CLOSE(owner); - if (attr_o == NULL) goto pop_1_error; - self_or_null = PyStackRef_NULL; + if (attr_o == NULL) { + goto pop_1_error; + } + self_or_null[0] = PyStackRef_NULL; } } else { @@ -5259,21 +5661,21 @@ attr_o = PyObject_GetAttr(PyStackRef_AsPyObjectBorrow(owner), name); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(owner); - if (attr_o == NULL) goto pop_1_error; - /* We need to define self_or_null on all paths */ - self_or_null = PyStackRef_NULL; + if (attr_o == NULL) { + goto pop_1_error; + } } attr = PyStackRef_FromPyObjectSteal(attr_o); } stack_pointer[-1] = attr; - if (oparg & 1) stack_pointer[0] = self_or_null; - stack_pointer += (oparg & 1); + stack_pointer += (oparg&1); assert(WITHIN_STACK_BOUNDS()); DISPATCH(); } TARGET(LOAD_ATTR_CLASS) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 10; INSTRUCTION_STATS(LOAD_ATTR_CLASS); static_assert(INLINE_CACHE_ENTRIES_LOAD_ATTR == 9, "incorrect cache size"); @@ -5288,7 +5690,7 @@ PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner); DEOPT_IF(!PyType_Check(owner_o), LOAD_ATTR); assert(type_version != 0); - DEOPT_IF(((PyTypeObject *)owner_o)->tp_version_tag != type_version, LOAD_ATTR); + DEOPT_IF(FT_ATOMIC_LOAD_UINT_RELAXED(((PyTypeObject *)owner_o)->tp_version_tag) != type_version, LOAD_ATTR); } /* Skip 2 cache entries */ // _LOAD_ATTR_CLASS @@ -5297,9 +5699,12 @@ STAT_INC(LOAD_ATTR, hit); assert(descr != NULL); attr = PyStackRef_FromPyObjectNew(descr); - null = PyStackRef_NULL; PyStackRef_CLOSE(owner); } + // _PUSH_NULL_CONDITIONAL + { + null = PyStackRef_NULL; + } stack_pointer[-1] = attr; if (oparg & 1) stack_pointer[0] = null; stack_pointer += (oparg & 1); @@ -5309,6 +5714,7 @@ TARGET(LOAD_ATTR_CLASS_WITH_METACLASS_CHECK) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 10; INSTRUCTION_STATS(LOAD_ATTR_CLASS_WITH_METACLASS_CHECK); static_assert(INLINE_CACHE_ENTRIES_LOAD_ATTR == 9, "incorrect cache size"); @@ -5323,7 +5729,7 @@ PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner); DEOPT_IF(!PyType_Check(owner_o), LOAD_ATTR); assert(type_version != 0); - 
DEOPT_IF(((PyTypeObject *)owner_o)->tp_version_tag != type_version, LOAD_ATTR); + DEOPT_IF(FT_ATOMIC_LOAD_UINT_RELAXED(((PyTypeObject *)owner_o)->tp_version_tag) != type_version, LOAD_ATTR); } // _GUARD_TYPE_VERSION { @@ -5338,9 +5744,12 @@ STAT_INC(LOAD_ATTR, hit); assert(descr != NULL); attr = PyStackRef_FromPyObjectNew(descr); - null = PyStackRef_NULL; PyStackRef_CLOSE(owner); } + // _PUSH_NULL_CONDITIONAL + { + null = PyStackRef_NULL; + } stack_pointer[-1] = attr; if (oparg & 1) stack_pointer[0] = null; stack_pointer += (oparg & 1); @@ -5350,6 +5759,7 @@ TARGET(LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 10; INSTRUCTION_STATS(LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN); static_assert(INLINE_CACHE_ENTRIES_LOAD_ATTR == 9, "incorrect cache size"); @@ -5364,7 +5774,7 @@ DEOPT_IF(tstate->interp->eval_frame, LOAD_ATTR); PyTypeObject *cls = Py_TYPE(owner_o); assert(type_version != 0); - DEOPT_IF(cls->tp_version_tag != type_version, LOAD_ATTR); + DEOPT_IF(FT_ATOMIC_LOAD_UINT_RELAXED(cls->tp_version_tag) != type_version, LOAD_ATTR); assert(Py_IS_TYPE(getattribute, &PyFunction_Type)); PyFunctionObject *f = (PyFunctionObject *)getattribute; assert(func_version != 0); @@ -5386,6 +5796,7 @@ TARGET(LOAD_ATTR_INSTANCE_VALUE) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 10; INSTRUCTION_STATS(LOAD_ATTR_INSTANCE_VALUE); static_assert(INLINE_CACHE_ENTRIES_LOAD_ATTR == 9, "incorrect cache size"); @@ -5406,23 +5817,33 @@ PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner); assert(Py_TYPE(owner_o)->tp_dictoffset < 0); assert(Py_TYPE(owner_o)->tp_flags & Py_TPFLAGS_INLINE_VALUES); - DEOPT_IF(!_PyObject_InlineValues(owner_o)->valid, LOAD_ATTR); + DEOPT_IF(!FT_ATOMIC_LOAD_UINT8(_PyObject_InlineValues(owner_o)->valid), LOAD_ATTR); } // _LOAD_ATTR_INSTANCE_VALUE { uint16_t offset = read_u16(&this_instr[4].cache); PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner); PyObject **value_ptr = (PyObject**)(((char *)owner_o) + offset); - PyObject *attr_o = *value_ptr; + PyObject *attr_o = FT_ATOMIC_LOAD_PTR_ACQUIRE(*value_ptr); DEOPT_IF(attr_o == NULL, LOAD_ATTR); + #ifdef Py_GIL_DISABLED + if (!_Py_TryIncrefCompareStackRef(value_ptr, attr_o, &attr)) { + DEOPT_IF(true, LOAD_ATTR); + } + #else + attr = PyStackRef_FromPyObjectNew(attr_o); + #endif STAT_INC(LOAD_ATTR, hit); - Py_INCREF(attr_o); - null = PyStackRef_NULL; - attr = PyStackRef_FromPyObjectSteal(attr_o); + stack_pointer[-1] = attr; + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(owner); + stack_pointer = _PyFrame_GetStackPointer(frame); } /* Skip 5 cache entries */ - stack_pointer[-1] = attr; + // _PUSH_NULL_CONDITIONAL + { + null = PyStackRef_NULL; + } if (oparg & 1) stack_pointer[0] = null; stack_pointer += (oparg & 1); assert(WITHIN_STACK_BOUNDS()); @@ -5431,12 +5852,13 @@ TARGET(LOAD_ATTR_METHOD_LAZY_DICT) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 10; INSTRUCTION_STATS(LOAD_ATTR_METHOD_LAZY_DICT); static_assert(INLINE_CACHE_ENTRIES_LOAD_ATTR == 9, "incorrect cache size"); _PyStackRef owner; _PyStackRef attr; - _PyStackRef self = PyStackRef_NULL; + _PyStackRef self; /* Skip 1 cache entry */ // _GUARD_TYPE_VERSION { @@ -5450,7 +5872,7 @@ { uint16_t dictoffset = read_u16(&this_instr[4].cache); char *ptr = ((char *)PyStackRef_AsPyObjectBorrow(owner)) + MANAGED_DICT_OFFSET + dictoffset; - PyObject *dict = *(PyObject **)ptr; + PyObject *dict = 
FT_ATOMIC_LOAD_PTR_ACQUIRE(*(PyObject **)ptr); /* This object has a __dict__, just not yet created */ DEOPT_IF(dict != NULL, LOAD_ATTR); } @@ -5474,12 +5896,13 @@ TARGET(LOAD_ATTR_METHOD_NO_DICT) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 10; INSTRUCTION_STATS(LOAD_ATTR_METHOD_NO_DICT); static_assert(INLINE_CACHE_ENTRIES_LOAD_ATTR == 9, "incorrect cache size"); _PyStackRef owner; _PyStackRef attr; - _PyStackRef self = PyStackRef_NULL; + _PyStackRef self; /* Skip 1 cache entry */ // _GUARD_TYPE_VERSION { @@ -5510,12 +5933,13 @@ TARGET(LOAD_ATTR_METHOD_WITH_VALUES) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 10; INSTRUCTION_STATS(LOAD_ATTR_METHOD_WITH_VALUES); static_assert(INLINE_CACHE_ENTRIES_LOAD_ATTR == 9, "incorrect cache size"); _PyStackRef owner; _PyStackRef attr; - _PyStackRef self = PyStackRef_NULL; + _PyStackRef self; /* Skip 1 cache entry */ // _GUARD_TYPE_VERSION { @@ -5529,14 +5953,16 @@ { PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner); assert(Py_TYPE(owner_o)->tp_flags & Py_TPFLAGS_INLINE_VALUES); - DEOPT_IF(!_PyObject_InlineValues(owner_o)->valid, LOAD_ATTR); + PyDictValues *ivs = _PyObject_InlineValues(owner_o); + DEOPT_IF(!FT_ATOMIC_LOAD_UINT8(ivs->valid), LOAD_ATTR); } // _GUARD_KEYS_VERSION { uint32_t keys_version = read_u32(&this_instr[4].cache); PyTypeObject *owner_cls = Py_TYPE(PyStackRef_AsPyObjectBorrow(owner)); PyHeapTypeObject *owner_heap_type = (PyHeapTypeObject *)owner_cls; - DEOPT_IF(owner_heap_type->ht_cached_keys->dk_version != keys_version, LOAD_ATTR); + PyDictKeysObject *keys = owner_heap_type->ht_cached_keys; + DEOPT_IF(FT_ATOMIC_LOAD_UINT32_RELAXED(keys->dk_version) != keys_version, LOAD_ATTR); } // _LOAD_ATTR_METHOD_WITH_VALUES { @@ -5558,6 +5984,7 @@ TARGET(LOAD_ATTR_MODULE) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 10; INSTRUCTION_STATS(LOAD_ATTR_MODULE); static_assert(INLINE_CACHE_ENTRIES_LOAD_ATTR == 9, "incorrect cache size"); @@ -5597,11 +6024,16 @@ attr = PyStackRef_FromPyObjectSteal(attr_o); #endif STAT_INC(LOAD_ATTR, hit); - null = PyStackRef_NULL; + stack_pointer[-1] = attr; + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(owner); + stack_pointer = _PyFrame_GetStackPointer(frame); } /* Skip 5 cache entries */ - stack_pointer[-1] = attr; + // _PUSH_NULL_CONDITIONAL + { + null = PyStackRef_NULL; + } if (oparg & 1) stack_pointer[0] = null; stack_pointer += (oparg & 1); assert(WITHIN_STACK_BOUNDS()); @@ -5610,6 +6042,7 @@ TARGET(LOAD_ATTR_NONDESCRIPTOR_NO_DICT) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 10; INSTRUCTION_STATS(LOAD_ATTR_NONDESCRIPTOR_NO_DICT); static_assert(INLINE_CACHE_ENTRIES_LOAD_ATTR == 9, "incorrect cache size"); @@ -5641,6 +6074,7 @@ TARGET(LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 10; INSTRUCTION_STATS(LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES); static_assert(INLINE_CACHE_ENTRIES_LOAD_ATTR == 9, "incorrect cache size"); @@ -5659,14 +6093,16 @@ { PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner); assert(Py_TYPE(owner_o)->tp_flags & Py_TPFLAGS_INLINE_VALUES); - DEOPT_IF(!_PyObject_InlineValues(owner_o)->valid, LOAD_ATTR); + PyDictValues *ivs = _PyObject_InlineValues(owner_o); + DEOPT_IF(!FT_ATOMIC_LOAD_UINT8(ivs->valid), LOAD_ATTR); } // _GUARD_KEYS_VERSION { uint32_t keys_version = 
read_u32(&this_instr[4].cache); PyTypeObject *owner_cls = Py_TYPE(PyStackRef_AsPyObjectBorrow(owner)); PyHeapTypeObject *owner_heap_type = (PyHeapTypeObject *)owner_cls; - DEOPT_IF(owner_heap_type->ht_cached_keys->dk_version != keys_version, LOAD_ATTR); + PyDictKeysObject *keys = owner_heap_type->ht_cached_keys; + DEOPT_IF(FT_ATOMIC_LOAD_UINT32_RELAXED(keys->dk_version) != keys_version, LOAD_ATTR); } // _LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES { @@ -5683,6 +6119,7 @@ TARGET(LOAD_ATTR_PROPERTY) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 10; INSTRUCTION_STATS(LOAD_ATTR_PROPERTY); static_assert(INLINE_CACHE_ENTRIES_LOAD_ATTR == 9, "incorrect cache size"); @@ -5748,6 +6185,7 @@ TARGET(LOAD_ATTR_SLOT) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 10; INSTRUCTION_STATS(LOAD_ATTR_SLOT); static_assert(INLINE_CACHE_ENTRIES_LOAD_ATTR == 9, "incorrect cache size"); @@ -5767,15 +6205,23 @@ { uint16_t index = read_u16(&this_instr[4].cache); PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner); - char *addr = (char *)owner_o + index; - PyObject *attr_o = *(PyObject **)addr; + PyObject **addr = (PyObject **)((char *)owner_o + index); + PyObject *attr_o = FT_ATOMIC_LOAD_PTR(*addr); DEOPT_IF(attr_o == NULL, LOAD_ATTR); - STAT_INC(LOAD_ATTR, hit); - null = PyStackRef_NULL; + #ifdef Py_GIL_DISABLED + int increfed = _Py_TryIncrefCompareStackRef(addr, attr_o, &attr); + DEOPT_IF(!increfed, LOAD_ATTR); + #else attr = PyStackRef_FromPyObjectNew(attr_o); + #endif + STAT_INC(LOAD_ATTR, hit); PyStackRef_CLOSE(owner); } /* Skip 5 cache entries */ + // _PUSH_NULL_CONDITIONAL + { + null = PyStackRef_NULL; + } stack_pointer[-1] = attr; if (oparg & 1) stack_pointer[0] = null; stack_pointer += (oparg & 1); @@ -5785,10 +6231,12 @@ TARGET(LOAD_ATTR_WITH_HINT) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 10; INSTRUCTION_STATS(LOAD_ATTR_WITH_HINT); static_assert(INLINE_CACHE_ENTRIES_LOAD_ATTR == 9, "incorrect cache size"); _PyStackRef owner; + PyDictObject *dict; _PyStackRef attr; _PyStackRef null = PyStackRef_NULL; /* Skip 1 cache entry */ @@ -5804,30 +6252,47 @@ { PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner); assert(Py_TYPE(owner_o)->tp_flags & Py_TPFLAGS_MANAGED_DICT); - PyDictObject *dict = _PyObject_GetManagedDict(owner_o); - DEOPT_IF(dict == NULL, LOAD_ATTR); - assert(PyDict_CheckExact((PyObject *)dict)); + PyDictObject *dict_o = _PyObject_GetManagedDict(owner_o); + DEOPT_IF(dict_o == NULL, LOAD_ATTR); + assert(PyDict_CheckExact((PyObject *)dict_o)); + dict = dict_o; } // _LOAD_ATTR_WITH_HINT { uint16_t hint = read_u16(&this_instr[4].cache); - PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner); PyObject *attr_o; - PyDictObject *dict = _PyObject_GetManagedDict(owner_o); - DEOPT_IF(hint >= (size_t)dict->ma_keys->dk_nentries, LOAD_ATTR); + if (!LOCK_OBJECT(dict)) { + DEOPT_IF(true, LOAD_ATTR); + } + if (hint >= (size_t)dict->ma_keys->dk_nentries) { + UNLOCK_OBJECT(dict); + DEOPT_IF(true, LOAD_ATTR); + } PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1); - DEOPT_IF(!DK_IS_UNICODE(dict->ma_keys), LOAD_ATTR); + if (dict->ma_keys->dk_kind != DICT_KEYS_UNICODE) { + UNLOCK_OBJECT(dict); + DEOPT_IF(true, LOAD_ATTR); + } PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + hint; - DEOPT_IF(ep->me_key != name, LOAD_ATTR); + if (ep->me_key != name) { + UNLOCK_OBJECT(dict); + DEOPT_IF(true, LOAD_ATTR); + } attr_o = ep->me_value; - DEOPT_IF(attr_o == 
NULL, LOAD_ATTR); + if (attr_o == NULL) { + UNLOCK_OBJECT(dict); + DEOPT_IF(true, LOAD_ATTR); + } STAT_INC(LOAD_ATTR, hit); - Py_INCREF(attr_o); - attr = PyStackRef_FromPyObjectSteal(attr_o); - null = PyStackRef_NULL; + attr = PyStackRef_FromPyObjectNew(attr_o); + UNLOCK_OBJECT(dict); PyStackRef_CLOSE(owner); } /* Skip 5 cache entries */ + // _PUSH_NULL_CONDITIONAL + { + null = PyStackRef_NULL; + } stack_pointer[-1] = attr; if (oparg & 1) stack_pointer[0] = null; stack_pointer += (oparg & 1); @@ -5844,7 +6309,9 @@ _PyFrame_SetStackPointer(frame, stack_pointer); int err = PyMapping_GetOptionalItem(BUILTINS(), &_Py_ID(__build_class__), &bc_o); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err < 0) goto error; + if (err < 0) { + goto error; + } if (bc_o == NULL) { _PyFrame_SetStackPointer(frame, stack_pointer); _PyErr_SetString(tstate, PyExc_NameError, @@ -5881,28 +6348,62 @@ DISPATCH(); } - TARGET(LOAD_CONST) { + TARGET(LOAD_CONST) { + frame->instr_ptr = next_instr; + next_instr += 1; + INSTRUCTION_STATS(LOAD_CONST); + PREDICTED_LOAD_CONST:; + _Py_CODEUNIT* const this_instr = next_instr - 1; + (void)this_instr; + _PyStackRef value; + /* We can't do this in the bytecode compiler as + * marshalling can intern strings and make them immortal. */ + PyObject *obj = GETITEM(FRAME_CO_CONSTS, oparg); + value = PyStackRef_FromPyObjectNew(obj); + #if ENABLE_SPECIALIZATION_FT + #ifdef Py_GIL_DISABLED + uint8_t expected = LOAD_CONST; + if (!_Py_atomic_compare_exchange_uint8( + &this_instr->op.code, &expected, + _Py_IsImmortal(obj) ? LOAD_CONST_IMMORTAL : LOAD_CONST_MORTAL)) { + // We might lose a race with instrumentation, which we don't care about. + assert(expected >= MIN_INSTRUMENTED_OPCODE); + } + #else + if (this_instr->op.code == LOAD_CONST) { + this_instr->op.code = _Py_IsImmortal(obj) ? 
LOAD_CONST_IMMORTAL : LOAD_CONST_MORTAL; + } + #endif + #endif + stack_pointer[0] = value; + stack_pointer += 1; + assert(WITHIN_STACK_BOUNDS()); + DISPATCH(); + } + + TARGET(LOAD_CONST_IMMORTAL) { frame->instr_ptr = next_instr; next_instr += 1; - INSTRUCTION_STATS(LOAD_CONST); - PREDICTED(LOAD_CONST); + INSTRUCTION_STATS(LOAD_CONST_IMMORTAL); + static_assert(0 == 0, "incorrect cache size"); _PyStackRef value; - value = PyStackRef_FromPyObjectNew(GETITEM(FRAME_CO_CONSTS, oparg)); + PyObject *obj = GETITEM(FRAME_CO_CONSTS, oparg); + assert(_Py_IsImmortal(obj)); + value = PyStackRef_FromPyObjectImmortal(obj); stack_pointer[0] = value; stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); DISPATCH(); } - TARGET(LOAD_CONST_IMMORTAL) { + TARGET(LOAD_CONST_MORTAL) { frame->instr_ptr = next_instr; next_instr += 1; - INSTRUCTION_STATS(LOAD_CONST_IMMORTAL); + INSTRUCTION_STATS(LOAD_CONST_MORTAL); static_assert(0 == 0, "incorrect cache size"); _PyStackRef value; PyObject *obj = GETITEM(FRAME_CO_CONSTS, oparg); - assert(_Py_IsImmortal(obj)); - value = PyStackRef_FromPyObjectImmortal(obj); + value = PyStackRef_FromPyObjectNew(obj); stack_pointer[0] = value; stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); @@ -6024,9 +6525,15 @@ goto error; } } + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(class_dict_st); + stack_pointer = _PyFrame_GetStackPointer(frame); value = PyStackRef_FromPyObjectSteal(value_o); - stack_pointer[-1] = value; + stack_pointer[0] = value; + stack_pointer += 1; + assert(WITHIN_STACK_BOUNDS()); DISPATCH(); } @@ -6043,7 +6550,9 @@ int err = PyMapping_GetOptionalItem(PyStackRef_AsPyObjectBorrow(mod_or_class_dict), name, &v_o); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(mod_or_class_dict); - if (err < 0) goto pop_1_error; + if (err < 0) { + goto pop_1_error; + } if (v_o == NULL) { if (PyDict_CheckExact(GLOBALS()) && PyDict_CheckExact(BUILTINS())) @@ -6075,13 +6584,17 @@ _PyFrame_SetStackPointer(frame, stack_pointer); int err = PyMapping_GetOptionalItem(GLOBALS(), name, &v_o); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err < 0) goto error; + if (err < 0) { + goto error; + } if (v_o == NULL) { /* namespace 2: builtins */ _PyFrame_SetStackPointer(frame, stack_pointer); int err = PyMapping_GetOptionalItem(BUILTINS(), name, &v_o); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err < 0) goto error; + if (err < 0) { + goto error; + } if (v_o == NULL) { _PyFrame_SetStackPointer(frame, stack_pointer); _PyEval_FormatExcCheckArg( @@ -6104,7 +6617,7 @@ frame->instr_ptr = next_instr; next_instr += 5; INSTRUCTION_STATS(LOAD_GLOBAL); - PREDICTED(LOAD_GLOBAL); + PREDICTED_LOAD_GLOBAL:; _Py_CODEUNIT* const this_instr = next_instr - 5; (void)this_instr; _PyStackRef *res; @@ -6136,7 +6649,12 @@ _PyFrame_SetStackPointer(frame, stack_pointer); _PyEval_LoadGlobalStackRef(GLOBALS(), BUILTINS(), name, res); stack_pointer = _PyFrame_GetStackPointer(frame); - if (PyStackRef_IsNull(*res)) goto error; + if (PyStackRef_IsNull(*res)) { + goto error; + } + } + // _PUSH_NULL_CONDITIONAL + { null = PyStackRef_NULL; } if (oparg & 1) stack_pointer[1] = null; @@ -6147,6 +6665,7 @@ TARGET(LOAD_GLOBAL_BUILTIN) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 5; INSTRUCTION_STATS(LOAD_GLOBAL_BUILTIN); static_assert(INLINE_CACHE_ENTRIES_LOAD_GLOBAL == 4, "incorrect cache size"); @@ -6187,6 +6706,9 @@ res = PyStackRef_FromPyObjectSteal(res_o); #endif 
STAT_INC(LOAD_GLOBAL, hit); + } + // _PUSH_NULL_CONDITIONAL + { null = PyStackRef_NULL; } stack_pointer[0] = res; @@ -6198,6 +6720,7 @@ TARGET(LOAD_GLOBAL_MODULE) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 5; INSTRUCTION_STATS(LOAD_GLOBAL_MODULE); static_assert(INLINE_CACHE_ENTRIES_LOAD_GLOBAL == 4, "incorrect cache size"); @@ -6230,6 +6753,9 @@ res = PyStackRef_FromPyObjectSteal(res_o); #endif STAT_INC(LOAD_GLOBAL, hit); + } + // _PUSH_NULL_CONDITIONAL + { null = PyStackRef_NULL; } stack_pointer[0] = res; @@ -6268,7 +6794,9 @@ _PyFrame_SetStackPointer(frame, stack_pointer); PyObject *v_o = _PyEval_LoadName(tstate, frame, name); stack_pointer = _PyFrame_GetStackPointer(frame); - if (v_o == NULL) goto error; + if (v_o == NULL) { + goto error; + } v = PyStackRef_FromPyObjectSteal(v_o); stack_pointer[0] = v; stack_pointer += 1; @@ -6331,7 +6859,7 @@ frame->instr_ptr = next_instr; next_instr += 2; INSTRUCTION_STATS(LOAD_SUPER_ATTR); - PREDICTED(LOAD_SUPER_ATTR); + PREDICTED_LOAD_SUPER_ATTR:; _Py_CODEUNIT* const this_instr = next_instr - 2; (void)this_instr; _PyStackRef global_super_st; @@ -6407,16 +6935,23 @@ PyStackRef_CLOSE(global_super_st); PyStackRef_CLOSE(class_st); PyStackRef_CLOSE(self_st); - if (super == NULL) goto pop_3_error; + if (super == NULL) { + goto pop_3_error; + } PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 2); stack_pointer += -3; assert(WITHIN_STACK_BOUNDS()); _PyFrame_SetStackPointer(frame, stack_pointer); PyObject *attr_o = PyObject_GetAttr(super, name); - stack_pointer = _PyFrame_GetStackPointer(frame); Py_DECREF(super); - if (attr_o == NULL) goto error; + stack_pointer = _PyFrame_GetStackPointer(frame); + if (attr_o == NULL) { + goto error; + } attr = PyStackRef_FromPyObjectSteal(attr_o); + } + // _PUSH_NULL_CONDITIONAL + { null = PyStackRef_NULL; } stack_pointer[0] = attr; @@ -6453,7 +6988,9 @@ PyStackRef_CLOSE(global_super_st); PyStackRef_CLOSE(class_st); PyStackRef_CLOSE(self_st); - if (attr == NULL) goto pop_3_error; + if (attr == NULL) { + goto pop_3_error; + } attr_st = PyStackRef_FromPyObjectSteal(attr); stack_pointer[-3] = attr_st; stack_pointer += -2; @@ -6489,18 +7026,23 @@ PyObject *attr_o = _PySuper_Lookup(cls, self, name, Py_TYPE(self)->tp_getattro == PyObject_GenericGetAttr ? 
&method_found : NULL); stack_pointer = _PyFrame_GetStackPointer(frame); - PyStackRef_CLOSE(global_super_st); - PyStackRef_CLOSE(class_st); if (attr_o == NULL) { - PyStackRef_CLOSE(self_st); - goto pop_3_error; + goto error; } if (method_found) { self_or_null = self_st; // transfer ownership } else { + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(self_st); + stack_pointer = _PyFrame_GetStackPointer(frame); self_or_null = PyStackRef_NULL; + stack_pointer += 1; + assert(WITHIN_STACK_BOUNDS()); } + PyStackRef_CLOSE(global_super_st); + PyStackRef_CLOSE(class_st); attr = PyStackRef_FromPyObjectSteal(attr_o); stack_pointer[-3] = attr; stack_pointer[-2] = self_or_null; @@ -6536,12 +7078,20 @@ PyFunctionObject *func_obj = (PyFunctionObject *) PyFunction_New(codeobj, GLOBALS()); stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(codeobj_st); - if (func_obj == NULL) goto pop_1_error; + stack_pointer = _PyFrame_GetStackPointer(frame); + if (func_obj == NULL) { + goto error; + } _PyFunction_SetVersion( func_obj, ((PyCodeObject *)codeobj)->co_version); func = PyStackRef_FromPyObjectSteal((PyObject *)func_obj); - stack_pointer[-1] = func; + stack_pointer[0] = func; + stack_pointer += 1; + assert(WITHIN_STACK_BOUNDS()); DISPATCH(); } @@ -6566,7 +7116,9 @@ PyStackRef_AsPyObjectSteal(value) ); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err != 0) goto pop_2_error; + if (err != 0) { + goto pop_2_error; + } stack_pointer += -2; assert(WITHIN_STACK_BOUNDS()); DISPATCH(); @@ -6600,7 +7152,9 @@ attrs = PyStackRef_FromPyObjectSteal(attrs_o); } else { - if (_PyErr_Occurred(tstate)) goto pop_3_error; + if (_PyErr_Occurred(tstate)) { + goto pop_3_error; + } // Error! attrs = PyStackRef_None; // Failure! 
} @@ -6624,7 +7178,9 @@ PyObject *values_or_none_o = _PyEval_MatchKeys(tstate, PyStackRef_AsPyObjectBorrow(subject), PyStackRef_AsPyObjectBorrow(keys)); stack_pointer = _PyFrame_GetStackPointer(frame); - if (values_or_none_o == NULL) goto error; + if (values_or_none_o == NULL) { + goto error; + } values_or_none = PyStackRef_FromPyObjectSteal(values_or_none_o); stack_pointer[0] = values_or_none; stack_pointer += 1; @@ -6693,6 +7249,18 @@ DISPATCH(); } + TARGET(POP_ITER) { + frame->instr_ptr = next_instr; + next_instr += 1; + INSTRUCTION_STATS(POP_ITER); + _PyStackRef value; + value = stack_pointer[-1]; + PyStackRef_CLOSE(value); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + DISPATCH(); + } + TARGET(POP_JUMP_IF_FALSE) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; (void)this_instr; @@ -6893,8 +7461,8 @@ assert(WITHIN_STACK_BOUNDS()); _PyFrame_SetStackPointer(frame, stack_pointer); _PyErr_SetString(tstate, PyExc_SystemError, "lasti is not an int"); - stack_pointer = _PyFrame_GetStackPointer(frame); Py_DECREF(exc); + stack_pointer = _PyFrame_GetStackPointer(frame); goto error; } stack_pointer += 1; @@ -6923,7 +7491,7 @@ frame->instr_ptr = next_instr; next_instr += 1; INSTRUCTION_STATS(RESUME); - PREDICTED(RESUME); + PREDICTED_RESUME:; _Py_CODEUNIT* const this_instr = next_instr - 1; (void)this_instr; // _LOAD_BYTECODE @@ -6935,7 +7503,9 @@ _Py_CODEUNIT *bytecode = _PyEval_GetExecutableCode(tstate, _PyFrame_GetCode(frame)); stack_pointer = _PyFrame_GetStackPointer(frame); - if (bytecode == NULL) goto error; + if (bytecode == NULL) { + goto error; + } _PyFrame_SetStackPointer(frame, stack_pointer); ptrdiff_t off = this_instr - _PyFrame_GetBytecode(frame); stack_pointer = _PyFrame_GetStackPointer(frame); @@ -6982,7 +7552,9 @@ _PyFrame_SetStackPointer(frame, stack_pointer); int err = _Py_HandlePending(tstate); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err != 0) goto error; + if (err != 0) { + goto error; + } } } } @@ -7019,7 +7591,9 @@ _PyFrame_SetStackPointer(frame, stack_pointer); PyGenObject *gen = (PyGenObject *)_Py_MakeCoro(func); stack_pointer = _PyFrame_GetStackPointer(frame); - if (gen == NULL) goto error; + if (gen == NULL) { + goto error; + } assert(EMPTY()); _PyFrame_SetStackPointer(frame, stack_pointer); _PyInterpreterFrame *gen_frame = &gen->gi_iframe; @@ -7049,9 +7623,7 @@ _PyStackRef retval; _PyStackRef res; retval = stack_pointer[-1]; - #if TIER_ONE - assert(frame != &entry_frame); - #endif + assert(frame->owner != FRAME_OWNED_BY_INTERPRETER); _PyStackRef temp = retval; stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); @@ -7076,7 +7648,7 @@ frame->instr_ptr = next_instr; next_instr += 2; INSTRUCTION_STATS(SEND); - PREDICTED(SEND); + PREDICTED_SEND:; _Py_CODEUNIT* const this_instr = next_instr - 2; (void)this_instr; _PyStackRef receiver; @@ -7104,7 +7676,7 @@ v = stack_pointer[-1]; PyObject *receiver_o = PyStackRef_AsPyObjectBorrow(receiver); PyObject *retval_o; - assert(frame != &entry_frame); + assert(frame->owner != FRAME_OWNED_BY_INTERPRETER); if ((tstate->interp->eval_frame == NULL) && (Py_TYPE(receiver_o) == &PyGen_Type || Py_TYPE(receiver_o) == &PyCoro_Type) && ((PyGenObject *)receiver_o)->gi_frame_state < FRAME_EXECUTING) @@ -7155,10 +7727,16 @@ goto pop_1_error; } } + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(v); + stack_pointer = _PyFrame_GetStackPointer(frame); retval = PyStackRef_FromPyObjectSteal(retval_o); } - stack_pointer[-1] = retval; + 
stack_pointer[0] = retval; + stack_pointer += 1; + assert(WITHIN_STACK_BOUNDS()); DISPATCH(); } @@ -7230,21 +7808,29 @@ _PyFrame_SetStackPointer(frame, stack_pointer); int err = PyMapping_GetOptionalItem(LOCALS(), &_Py_ID(__annotations__), &ann_dict); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err < 0) goto error; + if (err < 0) { + goto error; + } if (ann_dict == NULL) { _PyFrame_SetStackPointer(frame, stack_pointer); ann_dict = PyDict_New(); stack_pointer = _PyFrame_GetStackPointer(frame); - if (ann_dict == NULL) goto error; + if (ann_dict == NULL) { + goto error; + } _PyFrame_SetStackPointer(frame, stack_pointer); err = PyObject_SetItem(LOCALS(), &_Py_ID(__annotations__), ann_dict); - stack_pointer = _PyFrame_GetStackPointer(frame); Py_DECREF(ann_dict); - if (err) goto error; + stack_pointer = _PyFrame_GetStackPointer(frame); + if (err) { + goto error; + } } else { + _PyFrame_SetStackPointer(frame, stack_pointer); Py_DECREF(ann_dict); + stack_pointer = _PyFrame_GetStackPointer(frame); } DISPATCH(); } @@ -7262,7 +7848,9 @@ PyStackRef_AsPyObjectBorrow(v)); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(v); - if (err) goto pop_1_error; + if (err) { + goto pop_1_error; + } stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); DISPATCH(); @@ -7305,7 +7893,9 @@ PyStackRef_AsPyObjectBorrow(iterable)); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(iterable); - if (err < 0) goto pop_1_error; + if (err < 0) { + goto pop_1_error; + } stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); DISPATCH(); @@ -7315,7 +7905,7 @@ frame->instr_ptr = next_instr; next_instr += 5; INSTRUCTION_STATS(STORE_ATTR); - PREDICTED(STORE_ATTR); + PREDICTED_STORE_ATTR:; _Py_CODEUNIT* const this_instr = next_instr - 5; (void)this_instr; _PyStackRef owner; @@ -7349,7 +7939,9 @@ stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(v); PyStackRef_CLOSE(owner); - if (err) goto pop_2_error; + if (err) { + goto pop_2_error; + } } stack_pointer += -2; assert(WITHIN_STACK_BOUNDS()); @@ -7358,6 +7950,7 @@ TARGET(STORE_ATTR_INSTANCE_VALUE) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 5; INSTRUCTION_STATS(STORE_ATTR_INSTANCE_VALUE); static_assert(INLINE_CACHE_ENTRIES_STORE_ATTR == 4, "incorrect cache size"); @@ -7404,16 +7997,19 @@ _PyDictValues_AddToInsertionOrder(values, index); } UNLOCK_OBJECT(owner_o); - Py_XDECREF(old_value); + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(owner); + Py_XDECREF(old_value); + stack_pointer = _PyFrame_GetStackPointer(frame); } - stack_pointer += -2; - assert(WITHIN_STACK_BOUNDS()); DISPATCH(); } TARGET(STORE_ATTR_SLOT) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 5; INSTRUCTION_STATS(STORE_ATTR_SLOT); static_assert(INLINE_CACHE_ENTRIES_STORE_ATTR == 4, "incorrect cache size"); @@ -7439,16 +8035,19 @@ PyObject *old_value = *(PyObject **)addr; FT_ATOMIC_STORE_PTR_RELEASE(*(PyObject **)addr, PyStackRef_AsPyObjectSteal(value)); UNLOCK_OBJECT(owner_o); - Py_XDECREF(old_value); + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(owner); + Py_XDECREF(old_value); + stack_pointer = _PyFrame_GetStackPointer(frame); } - stack_pointer += -2; - assert(WITHIN_STACK_BOUNDS()); DISPATCH(); } TARGET(STORE_ATTR_WITH_HINT) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + 
(void)this_instr; next_instr += 5; INSTRUCTION_STATS(STORE_ATTR_WITH_HINT); static_assert(INLINE_CACHE_ENTRIES_STORE_ATTR == 4, "incorrect cache size"); @@ -7502,12 +8101,14 @@ UNLOCK_OBJECT(dict); // old_value should be DECREFed after GC track checking is done, if not, it could raise a segmentation fault, // when dict only holds the strong reference to value in ep->me_value. - Py_XDECREF(old_value); STAT_INC(STORE_ATTR, hit); + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(owner); + Py_XDECREF(old_value); + stack_pointer = _PyFrame_GetStackPointer(frame); } - stack_pointer += -2; - assert(WITHIN_STACK_BOUNDS()); DISPATCH(); } @@ -7581,7 +8182,9 @@ int err = PyDict_SetItem(GLOBALS(), name, PyStackRef_AsPyObjectBorrow(v)); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(v); - if (err) goto pop_1_error; + if (err) { + goto pop_1_error; + } stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); DISPATCH(); @@ -7615,7 +8218,9 @@ stack_pointer = _PyFrame_GetStackPointer(frame); } PyStackRef_CLOSE(v); - if (err) goto pop_1_error; + if (err) { + goto pop_1_error; + } stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); DISPATCH(); @@ -7655,14 +8260,16 @@ assert(WITHIN_STACK_BOUNDS()); _PyFrame_SetStackPointer(frame, stack_pointer); err = PyObject_SetItem(PyStackRef_AsPyObjectBorrow(container), slice, PyStackRef_AsPyObjectBorrow(v)); - stack_pointer = _PyFrame_GetStackPointer(frame); Py_DECREF(slice); + stack_pointer = _PyFrame_GetStackPointer(frame); stack_pointer += 2; assert(WITHIN_STACK_BOUNDS()); } PyStackRef_CLOSE(v); PyStackRef_CLOSE(container); - if (err) goto pop_4_error; + if (err) { + goto pop_4_error; + } } stack_pointer += -4; assert(WITHIN_STACK_BOUNDS()); @@ -7673,7 +8280,7 @@ frame->instr_ptr = next_instr; next_instr += 2; INSTRUCTION_STATS(STORE_SUBSCR); - PREDICTED(STORE_SUBSCR); + PREDICTED_STORE_SUBSCR:; _Py_CODEUNIT* const this_instr = next_instr - 2; (void)this_instr; _PyStackRef container; @@ -7707,7 +8314,9 @@ PyStackRef_CLOSE(v); PyStackRef_CLOSE(container); PyStackRef_CLOSE(sub); - if (err) goto pop_3_error; + if (err) { + goto pop_3_error; + } } stack_pointer += -3; assert(WITHIN_STACK_BOUNDS()); @@ -7734,10 +8343,14 @@ PyStackRef_AsPyObjectSteal(sub), PyStackRef_AsPyObjectSteal(value)); stack_pointer = _PyFrame_GetStackPointer(frame); - PyStackRef_CLOSE(dict_st); - if (err) goto pop_3_error; stack_pointer += -3; assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_CLOSE(dict_st); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (err) { + goto error; + } DISPATCH(); } @@ -7771,11 +8384,13 @@ PyList_SET_ITEM(list, index, PyStackRef_AsPyObjectSteal(value)); assert(old_value != NULL); UNLOCK_OBJECT(list); // unlock before decrefs! 
- Py_DECREF(old_value); PyStackRef_CLOSE_SPECIALIZED(sub_st, _PyLong_ExactDealloc); - PyStackRef_CLOSE(list_st); stack_pointer += -3; assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_CLOSE(list_st); + Py_DECREF(old_value); + stack_pointer = _PyFrame_GetStackPointer(frame); DISPATCH(); } @@ -7783,17 +8398,14 @@ frame->instr_ptr = next_instr; next_instr += 1; INSTRUCTION_STATS(SWAP); - _PyStackRef bottom_in; - _PyStackRef top_in; - _PyStackRef top_out; - _PyStackRef bottom_out; - top_in = stack_pointer[-1]; - bottom_in = stack_pointer[-2 - (oparg-2)]; - bottom_out = bottom_in; - top_out = top_in; + _PyStackRef *bottom; + _PyStackRef *top; + top = &stack_pointer[-1]; + bottom = &stack_pointer[-2 - (oparg-2)]; + _PyStackRef temp = bottom[0]; + bottom[0] = top[0]; + top[0] = temp; assert(oparg >= 2); - stack_pointer[-2 - (oparg-2)] = top_out; - stack_pointer[-1] = bottom_out; DISPATCH(); } @@ -7801,7 +8413,7 @@ frame->instr_ptr = next_instr; next_instr += 4; INSTRUCTION_STATS(TO_BOOL); - PREDICTED(TO_BOOL); + PREDICTED_TO_BOOL:; _Py_CODEUNIT* const this_instr = next_instr - 4; (void)this_instr; _PyStackRef value; @@ -7830,7 +8442,9 @@ int err = PyObject_IsTrue(PyStackRef_AsPyObjectBorrow(value)); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(value); - if (err < 0) goto pop_1_error; + if (err < 0) { + goto pop_1_error; + } res = err ? PyStackRef_True : PyStackRef_False; } stack_pointer[-1] = res; @@ -7839,6 +8453,7 @@ TARGET(TO_BOOL_ALWAYS_TRUE) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 4; INSTRUCTION_STATS(TO_BOOL_ALWAYS_TRUE); static_assert(INLINE_CACHE_ENTRIES_TO_BOOL == 3, "incorrect cache size"); @@ -7977,7 +8592,9 @@ PyObject *res_o = PyNumber_Invert(PyStackRef_AsPyObjectBorrow(value)); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(value); - if (res_o == NULL) goto pop_1_error; + if (res_o == NULL) { + goto pop_1_error; + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-1] = res; DISPATCH(); @@ -7994,7 +8611,9 @@ PyObject *res_o = PyNumber_Negative(PyStackRef_AsPyObjectBorrow(value)); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(value); - if (res_o == NULL) goto pop_1_error; + if (res_o == NULL) { + goto pop_1_error; + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-1] = res; DISPATCH(); @@ -8027,7 +8646,9 @@ int res = _PyEval_UnpackIterableStackRef(tstate, seq, oparg & 0xFF, oparg >> 8, top); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(seq); - if (res == 0) goto pop_1_error; + if (res == 0) { + goto pop_1_error; + } stack_pointer += (oparg & 0xFF) + (oparg >> 8); assert(WITHIN_STACK_BOUNDS()); DISPATCH(); @@ -8037,7 +8658,7 @@ frame->instr_ptr = next_instr; next_instr += 2; INSTRUCTION_STATS(UNPACK_SEQUENCE); - PREDICTED(UNPACK_SEQUENCE); + PREDICTED_UNPACK_SEQUENCE:; _Py_CODEUNIT* const this_instr = next_instr - 2; (void)this_instr; _PyStackRef seq; @@ -8069,7 +8690,9 @@ int res = _PyEval_UnpackIterableStackRef(tstate, seq, oparg, -1, top); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(seq); - if (res == 0) goto pop_1_error; + if (res == 0) { + goto pop_1_error; + } } stack_pointer += -1 + oparg; assert(WITHIN_STACK_BOUNDS()); @@ -8186,7 +8809,9 @@ tb = Py_None; } else { + _PyFrame_SetStackPointer(frame, stack_pointer); Py_DECREF(tb); + stack_pointer = _PyFrame_GetStackPointer(frame); } assert(PyStackRef_LongCheck(lasti)); (void)lasti; // Shut up compiler warning 
if asserts are off @@ -8196,7 +8821,9 @@ PyObject *res_o = PyObject_Vectorcall(exit_func_o, stack + 2 - has_self, (3 + has_self) | PY_VECTORCALL_ARGUMENTS_OFFSET, NULL); stack_pointer = _PyFrame_GetStackPointer(frame); - if (res_o == NULL) goto error; + if (res_o == NULL) { + goto error; + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[0] = res; stack_pointer += 1; @@ -8214,9 +8841,7 @@ // NOTE: It's important that YIELD_VALUE never raises an exception! // The compiler treats any exception raised here as a failed close() // or throw() call. - #if TIER_ONE - assert(frame != &entry_frame); - #endif + assert(frame->owner != FRAME_OWNED_BY_INTERPRETER); frame->instr_ptr++; PyGenObject *gen = _PyGen_GetGeneratorFromFrame(frame); assert(FRAME_SUSPENDED_YIELD_FROM == FRAME_SUSPENDED + 1); @@ -8251,4 +8876,175 @@ assert(WITHIN_STACK_BOUNDS()); DISPATCH(); } + + /* END INSTRUCTIONS */ +#if USE_COMPUTED_GOTOS + _unknown_opcode: +#else + EXTRA_CASES // From pycore_opcode_metadata.h, a 'case' for each unused opcode +#endif + /* Tell C compilers not to hold the opcode variable in the loop. + next_instr points the current instruction without TARGET(). */ + opcode = next_instr->op.code; + _PyErr_Format(tstate, PyExc_SystemError, + "%U:%d: unknown opcode %d", + _PyFrame_GetCode(frame)->co_filename, + PyUnstable_InterpreterFrame_GetLine(frame), + opcode); + goto error; + + } + + /* This should never be reached. Every opcode should end with DISPATCH() + or goto error. */ + Py_UNREACHABLE(); + /* BEGIN LABELS */ + + pop_4_error: + { + STACK_SHRINK(4); + goto error; + } + + pop_3_error: + { + STACK_SHRINK(3); + goto error; + } + + pop_2_error: + { + STACK_SHRINK(2); + goto error; + } + + pop_1_error: + { + STACK_SHRINK(1); + goto error; + } + + error: + { + /* Double-check exception status. */ + #ifdef NDEBUG + if (!_PyErr_Occurred(tstate)) { + _PyErr_SetString(tstate, PyExc_SystemError, + "error return without exception set"); + } + #else + assert(_PyErr_Occurred(tstate)); + #endif + + /* Log traceback info. */ + assert(frame->owner != FRAME_OWNED_BY_INTERPRETER); + if (!_PyFrame_IsIncomplete(frame)) { + PyFrameObject *f = _PyFrame_GetFrameObject(frame); + if (f != NULL) { + PyTraceBack_Here(f); + } + } + _PyEval_MonitorRaise(tstate, frame, next_instr-1); + goto exception_unwind; + } + + exception_unwind: + { + /* We can't use frame->instr_ptr here, as RERAISE may have set it */ + int offset = INSTR_OFFSET()-1; + int level, handler, lasti; + if (get_exception_handler(_PyFrame_GetCode(frame), offset, &level, &handler, &lasti) == 0) { + // No handlers, so exit. + assert(_PyErr_Occurred(tstate)); + /* Pop remaining stack entries. */ + _PyStackRef *stackbase = _PyFrame_Stackbase(frame); + while (stack_pointer > stackbase) { + PyStackRef_XCLOSE(POP()); + } + assert(STACK_LEVEL() == 0); + _PyFrame_SetStackPointer(frame, stack_pointer); + monitor_unwind(tstate, frame, next_instr-1); + goto exit_unwind; + } + assert(STACK_LEVEL() >= level); + _PyStackRef *new_top = _PyFrame_Stackbase(frame) + level; + while (stack_pointer > new_top) { + PyStackRef_XCLOSE(POP()); + } + if (lasti) { + int frame_lasti = _PyInterpreterFrame_LASTI(frame); + PyObject *lasti = PyLong_FromLong(frame_lasti); + if (lasti == NULL) { + goto exception_unwind; + } + PUSH(PyStackRef_FromPyObjectSteal(lasti)); + } + /* Make the raw exception data + available to the handler, + so a program can emulate the + Python main loop. 
*/ + PyObject *exc = _PyErr_GetRaisedException(tstate); + PUSH(PyStackRef_FromPyObjectSteal(exc)); + next_instr = _PyFrame_GetBytecode(frame) + handler; + if (monitor_handled(tstate, frame, next_instr, exc) < 0) { + goto exception_unwind; + } + /* Resume normal execution */ + #ifdef LLTRACE + if (frame->lltrace >= 5) { + lltrace_resume_frame(frame); + } + #endif + DISPATCH(); + } + + exit_unwind: + { + assert(_PyErr_Occurred(tstate)); + _Py_LeaveRecursiveCallPy(tstate); + assert(frame->owner != FRAME_OWNED_BY_INTERPRETER); + // GH-99729: We need to unlink the frame *before* clearing it: + _PyInterpreterFrame *dying = frame; + frame = tstate->current_frame = dying->previous; + _PyEval_FrameClearAndPop(tstate, dying); + frame->return_offset = 0; + if (frame->owner == FRAME_OWNED_BY_INTERPRETER) { + /* Restore previous frame and exit */ + tstate->current_frame = frame->previous; + tstate->c_recursion_remaining += PY_EVAL_C_STACK_UNITS; + return NULL; + } + next_instr = frame->instr_ptr; + stack_pointer = _PyFrame_GetStackPointer(frame); + goto error; + } + + start_frame: + { + if (_Py_EnterRecursivePy(tstate)) { + goto exit_unwind; + } + next_instr = frame->instr_ptr; + stack_pointer = _PyFrame_GetStackPointer(frame); + #ifdef LLTRACE + { + int lltrace = maybe_lltrace_resume_frame(frame, GLOBALS()); + frame->lltrace = lltrace; + if (lltrace < 0) { + goto exit_unwind; + } + } + #endif + + #ifdef Py_DEBUG + /* _PyEval_EvalFrameDefault() must not be called with an exception set, + because it can clear it (directly or indirectly) and so the + caller loses its exception */ + assert(!_PyErr_Occurred(tstate)); + #endif + + DISPATCH(); + } + +/* END LABELS */ #undef TIER_ONE diff --git a/Python/getopt.c b/Python/getopt.c index f64c89fa22734a..39a6938dec7663 100644 --- a/Python/getopt.c +++ b/Python/getopt.c @@ -102,7 +102,7 @@ int _PyOS_GetOpt(Py_ssize_t argc, wchar_t * const *argv, int *longindex) // Parse long option. 
if (*opt_ptr == L'\0') { if (_PyOS_opterr) { - fprintf(stderr, "expected long option\n"); + fprintf(stderr, "Expected long option\n"); } return -1; } @@ -114,7 +114,7 @@ int _PyOS_GetOpt(Py_ssize_t argc, wchar_t * const *argv, int *longindex) } if (!opt->name) { if (_PyOS_opterr) { - fprintf(stderr, "unknown option %ls\n", argv[_PyOS_optind - 1]); + fprintf(stderr, "Unknown option: %ls\n", argv[_PyOS_optind - 1]); } return '_'; } diff --git a/Python/hamt.c b/Python/hamt.c index cfd211f4541446..ed43a0449d7a01 100644 --- a/Python/hamt.c +++ b/Python/hamt.c @@ -319,6 +319,8 @@ typedef struct { Py_ssize_t a_count; } PyHamtNode_Array; +#define _PyHamtNode_Array_CAST(op) ((PyHamtNode_Array *)(op)) + typedef struct { PyObject_VAR_HEAD @@ -326,6 +328,8 @@ typedef struct { PyObject *c_array[1]; } PyHamtNode_Collision; +#define _PyHamtNode_Collision_CAST(op) ((PyHamtNode_Collision *)(op)) + static PyHamtObject * hamt_alloc(void); @@ -479,6 +483,8 @@ _hamt_dump_ident(PyUnicodeWriter *writer, int level) #endif /* Py_DEBUG */ /////////////////////////////////// Bitmap Node +#define _PyHamtNode_Bitmap_CAST(op) ((PyHamtNode_Bitmap *)(op)) + static PyHamtNode * hamt_node_bitmap_new(Py_ssize_t size) @@ -1083,30 +1089,27 @@ hamt_node_bitmap_find(PyHamtNode_Bitmap *self, } static int -hamt_node_bitmap_traverse(PyHamtNode_Bitmap *self, visitproc visit, void *arg) +hamt_node_bitmap_traverse(PyObject *op, visitproc visit, void *arg) { /* Bitmap's tp_traverse */ - - Py_ssize_t i; - - for (i = Py_SIZE(self); --i >= 0; ) { + PyHamtNode_Bitmap *self = _PyHamtNode_Bitmap_CAST(op); + for (Py_ssize_t i = Py_SIZE(self); --i >= 0;) { Py_VISIT(self->b_array[i]); } - return 0; } static void -hamt_node_bitmap_dealloc(PyHamtNode_Bitmap *self) +hamt_node_bitmap_dealloc(PyObject *self) { /* Bitmap's tp_dealloc */ - Py_ssize_t len = Py_SIZE(self); - Py_ssize_t i; + PyHamtNode_Bitmap *node = _PyHamtNode_Bitmap_CAST(self); + Py_ssize_t i, len = Py_SIZE(self); - if (Py_SIZE(self) == 0) { + if (len == 0) { /* The empty node is statically allocated. 
*/ - assert(self == &_Py_SINGLETON(hamt_bitmap_node_empty)); + assert(node == &_Py_SINGLETON(hamt_bitmap_node_empty)); #ifdef Py_DEBUG _Py_FatalRefcountError("deallocating the empty hamt node bitmap singleton"); #else @@ -1120,11 +1123,11 @@ hamt_node_bitmap_dealloc(PyHamtNode_Bitmap *self) if (len > 0) { i = len; while (--i >= 0) { - Py_XDECREF(self->b_array[i]); + Py_XDECREF(node->b_array[i]); } } - Py_TYPE(self)->tp_free((PyObject *)self); + Py_TYPE(self)->tp_free(self); Py_TRASHCAN_END } @@ -1489,38 +1492,30 @@ hamt_node_collision_find(PyHamtNode_Collision *self, static int -hamt_node_collision_traverse(PyHamtNode_Collision *self, - visitproc visit, void *arg) +hamt_node_collision_traverse(PyObject *op, visitproc visit, void *arg) { /* Collision's tp_traverse */ - - Py_ssize_t i; - - for (i = Py_SIZE(self); --i >= 0; ) { + PyHamtNode_Collision *self = _PyHamtNode_Collision_CAST(op); + for (Py_ssize_t i = Py_SIZE(self); --i >= 0; ) { Py_VISIT(self->c_array[i]); } - return 0; } static void -hamt_node_collision_dealloc(PyHamtNode_Collision *self) +hamt_node_collision_dealloc(PyObject *self) { /* Collision's tp_dealloc */ - Py_ssize_t len = Py_SIZE(self); - PyObject_GC_UnTrack(self); Py_TRASHCAN_BEGIN(self, hamt_node_collision_dealloc) - if (len > 0) { - + PyHamtNode_Collision *node = _PyHamtNode_Collision_CAST(self); while (--len >= 0) { - Py_XDECREF(self->c_array[len]); + Py_XDECREF(node->c_array[len]); } } - - Py_TYPE(self)->tp_free((PyObject *)self); + Py_TYPE(self)->tp_free(self); Py_TRASHCAN_END } @@ -1868,35 +1863,27 @@ hamt_node_array_find(PyHamtNode_Array *self, } static int -hamt_node_array_traverse(PyHamtNode_Array *self, - visitproc visit, void *arg) +hamt_node_array_traverse(PyObject *op, visitproc visit, void *arg) { /* Array's tp_traverse */ - - Py_ssize_t i; - - for (i = 0; i < HAMT_ARRAY_NODE_SIZE; i++) { + PyHamtNode_Array *self = _PyHamtNode_Array_CAST(op); + for (Py_ssize_t i = 0; i < HAMT_ARRAY_NODE_SIZE; i++) { Py_VISIT(self->a_array[i]); } - return 0; } static void -hamt_node_array_dealloc(PyHamtNode_Array *self) +hamt_node_array_dealloc(PyObject *self) { /* Array's tp_dealloc */ - - Py_ssize_t i; - PyObject_GC_UnTrack(self); Py_TRASHCAN_BEGIN(self, hamt_node_array_dealloc) - - for (i = 0; i < HAMT_ARRAY_NODE_SIZE; i++) { - Py_XDECREF(self->a_array[i]); + PyHamtNode_Array *obj = _PyHamtNode_Array_CAST(self); + for (Py_ssize_t i = 0; i < HAMT_ARRAY_NODE_SIZE; i++) { + Py_XDECREF(obj->a_array[i]); } - - Py_TYPE(self)->tp_free((PyObject *)self); + Py_TYPE(self)->tp_free(self); Py_TRASHCAN_END } @@ -2605,6 +2592,8 @@ static PyObject * hamt_dump(PyHamtObject *self); #endif +#define _PyHamtObject_CAST(op) ((PyHamtObject *)(op)) + static PyObject * hamt_tp_new(PyTypeObject *type, PyObject *args, PyObject *kwds) @@ -2613,24 +2602,27 @@ hamt_tp_new(PyTypeObject *type, PyObject *args, PyObject *kwds) } static int -hamt_tp_clear(PyHamtObject *self) +hamt_tp_clear(PyObject *op) { + PyHamtObject *self = _PyHamtObject_CAST(op); Py_CLEAR(self->h_root); return 0; } static int -hamt_tp_traverse(PyHamtObject *self, visitproc visit, void *arg) +hamt_tp_traverse(PyObject *op, visitproc visit, void *arg) { + PyHamtObject *self = _PyHamtObject_CAST(op); Py_VISIT(self->h_root); return 0; } static void -hamt_tp_dealloc(PyHamtObject *self) +hamt_tp_dealloc(PyObject *self) { - if (self == _empty_hamt) { + PyHamtObject *obj = _PyHamtObject_CAST(self); + if (obj == _empty_hamt) { /* The empty one is statically allocated. 
*/ #ifdef Py_DEBUG _Py_FatalRefcountError("deallocating the empty hamt singleton"); @@ -2640,8 +2632,8 @@ hamt_tp_dealloc(PyHamtObject *self) } PyObject_GC_UnTrack(self); - if (self->h_weakreflist != NULL) { - PyObject_ClearWeakRefs((PyObject*)self); + if (obj->h_weakreflist != NULL) { + PyObject_ClearWeakRefs(self); } (void)hamt_tp_clear(self); Py_TYPE(self)->tp_free(self); @@ -2673,16 +2665,18 @@ hamt_tp_richcompare(PyObject *v, PyObject *w, int op) } static int -hamt_tp_contains(PyHamtObject *self, PyObject *key) +hamt_tp_contains(PyObject *op, PyObject *key) { PyObject *val; + PyHamtObject *self = _PyHamtObject_CAST(op); return _PyHamt_Find(self, key, &val); } static PyObject * -hamt_tp_subscript(PyHamtObject *self, PyObject *key) +hamt_tp_subscript(PyObject *op, PyObject *key) { PyObject *val; + PyHamtObject *self = _PyHamtObject_CAST(op); hamt_find_t res = hamt_find(self, key, &val); switch (res) { case F_ERROR: @@ -2698,19 +2692,21 @@ hamt_tp_subscript(PyHamtObject *self, PyObject *key) } static Py_ssize_t -hamt_tp_len(PyHamtObject *self) +hamt_tp_len(PyObject *op) { + PyHamtObject *self = _PyHamtObject_CAST(op); return _PyHamt_Len(self); } static PyObject * -hamt_tp_iter(PyHamtObject *self) +hamt_tp_iter(PyObject *op) { + PyHamtObject *self = _PyHamtObject_CAST(op); return _PyHamt_NewIterKeys(self); } static PyObject * -hamt_py_set(PyHamtObject *self, PyObject *args) +hamt_py_set(PyObject *op, PyObject *args) { PyObject *key; PyObject *val; @@ -2719,11 +2715,12 @@ hamt_py_set(PyHamtObject *self, PyObject *args) return NULL; } + PyHamtObject *self = _PyHamtObject_CAST(op); return (PyObject *)_PyHamt_Assoc(self, key, val); } static PyObject * -hamt_py_get(PyHamtObject *self, PyObject *args) +hamt_py_get(PyObject *op, PyObject *args) { PyObject *key; PyObject *def = NULL; @@ -2733,6 +2730,7 @@ hamt_py_get(PyHamtObject *self, PyObject *args) } PyObject *val = NULL; + PyHamtObject *self = _PyHamtObject_CAST(op); hamt_find_t res = hamt_find(self, key, &val); switch (res) { case F_ERROR: @@ -2750,67 +2748,63 @@ hamt_py_get(PyHamtObject *self, PyObject *args) } static PyObject * -hamt_py_delete(PyHamtObject *self, PyObject *key) +hamt_py_delete(PyObject *op, PyObject *key) { + PyHamtObject *self = _PyHamtObject_CAST(op); return (PyObject *)_PyHamt_Without(self, key); } static PyObject * -hamt_py_items(PyHamtObject *self, PyObject *args) +hamt_py_items(PyObject *op, PyObject *args) { + PyHamtObject *self = _PyHamtObject_CAST(op); return _PyHamt_NewIterItems(self); } static PyObject * -hamt_py_values(PyHamtObject *self, PyObject *args) +hamt_py_values(PyObject *op, PyObject *args) { + PyHamtObject *self = _PyHamtObject_CAST(op); return _PyHamt_NewIterValues(self); } static PyObject * -hamt_py_keys(PyHamtObject *self, PyObject *Py_UNUSED(args)) +hamt_py_keys(PyObject *op, PyObject *Py_UNUSED(args)) { + PyHamtObject *self = _PyHamtObject_CAST(op); return _PyHamt_NewIterKeys(self); } #ifdef Py_DEBUG static PyObject * -hamt_py_dump(PyHamtObject *self, PyObject *Py_UNUSED(args)) +hamt_py_dump(PyObject *op, PyObject *Py_UNUSED(args)) { + PyHamtObject *self = _PyHamtObject_CAST(op); return hamt_dump(self); } #endif static PyMethodDef PyHamt_methods[] = { - {"set", _PyCFunction_CAST(hamt_py_set), METH_VARARGS, NULL}, - {"get", _PyCFunction_CAST(hamt_py_get), METH_VARARGS, NULL}, - {"delete", _PyCFunction_CAST(hamt_py_delete), METH_O, NULL}, - {"items", _PyCFunction_CAST(hamt_py_items), METH_NOARGS, NULL}, - {"keys", _PyCFunction_CAST(hamt_py_keys), METH_NOARGS, NULL}, - {"values", 
_PyCFunction_CAST(hamt_py_values), METH_NOARGS, NULL}, + {"set", hamt_py_set, METH_VARARGS, NULL}, + {"get", hamt_py_get, METH_VARARGS, NULL}, + {"delete", hamt_py_delete, METH_O, NULL}, + {"items", hamt_py_items, METH_NOARGS, NULL}, + {"keys", hamt_py_keys, METH_NOARGS, NULL}, + {"values", hamt_py_values, METH_NOARGS, NULL}, #ifdef Py_DEBUG - {"__dump__", _PyCFunction_CAST(hamt_py_dump), METH_NOARGS, NULL}, + {"__dump__", hamt_py_dump, METH_NOARGS, NULL}, #endif {NULL, NULL} }; static PySequenceMethods PyHamt_as_sequence = { - 0, /* sq_length */ - 0, /* sq_concat */ - 0, /* sq_repeat */ - 0, /* sq_item */ - 0, /* sq_slice */ - 0, /* sq_ass_item */ - 0, /* sq_ass_slice */ - (objobjproc)hamt_tp_contains, /* sq_contains */ - 0, /* sq_inplace_concat */ - 0, /* sq_inplace_repeat */ + .sq_contains = hamt_tp_contains, }; static PyMappingMethods PyHamt_as_mapping = { - (lenfunc)hamt_tp_len, /* mp_length */ - (binaryfunc)hamt_tp_subscript, /* mp_subscript */ + .mp_length = hamt_tp_len, + .mp_subscript = hamt_tp_subscript, }; PyTypeObject _PyHamt_Type = { @@ -2820,13 +2814,13 @@ PyTypeObject _PyHamt_Type = { .tp_methods = PyHamt_methods, .tp_as_mapping = &PyHamt_as_mapping, .tp_as_sequence = &PyHamt_as_sequence, - .tp_iter = (getiterfunc)hamt_tp_iter, - .tp_dealloc = (destructor)hamt_tp_dealloc, + .tp_iter = hamt_tp_iter, + .tp_dealloc = hamt_tp_dealloc, .tp_getattro = PyObject_GenericGetAttr, .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, .tp_richcompare = hamt_tp_richcompare, - .tp_traverse = (traverseproc)hamt_tp_traverse, - .tp_clear = (inquiry)hamt_tp_clear, + .tp_traverse = hamt_tp_traverse, + .tp_clear = hamt_tp_clear, .tp_new = hamt_tp_new, .tp_weaklistoffset = offsetof(PyHamtObject, h_weakreflist), .tp_hash = PyObject_HashNotImplemented, @@ -2841,10 +2835,10 @@ PyTypeObject _PyHamt_ArrayNode_Type = { "hamt_array_node", sizeof(PyHamtNode_Array), 0, - .tp_dealloc = (destructor)hamt_node_array_dealloc, + .tp_dealloc = hamt_node_array_dealloc, .tp_getattro = PyObject_GenericGetAttr, .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, - .tp_traverse = (traverseproc)hamt_node_array_traverse, + .tp_traverse = hamt_node_array_traverse, .tp_free = PyObject_GC_Del, .tp_hash = PyObject_HashNotImplemented, }; @@ -2854,10 +2848,10 @@ PyTypeObject _PyHamt_BitmapNode_Type = { "hamt_bitmap_node", sizeof(PyHamtNode_Bitmap) - sizeof(PyObject *), sizeof(PyObject *), - .tp_dealloc = (destructor)hamt_node_bitmap_dealloc, + .tp_dealloc = hamt_node_bitmap_dealloc, .tp_getattro = PyObject_GenericGetAttr, .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, - .tp_traverse = (traverseproc)hamt_node_bitmap_traverse, + .tp_traverse = hamt_node_bitmap_traverse, .tp_free = PyObject_GC_Del, .tp_hash = PyObject_HashNotImplemented, }; @@ -2867,10 +2861,10 @@ PyTypeObject _PyHamt_CollisionNode_Type = { "hamt_collision_node", sizeof(PyHamtNode_Collision) - sizeof(PyObject *), sizeof(PyObject *), - .tp_dealloc = (destructor)hamt_node_collision_dealloc, + .tp_dealloc = hamt_node_collision_dealloc, .tp_getattro = PyObject_GenericGetAttr, .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, - .tp_traverse = (traverseproc)hamt_node_collision_traverse, + .tp_traverse = hamt_node_collision_traverse, .tp_free = PyObject_GC_Del, .tp_hash = PyObject_HashNotImplemented, }; diff --git a/Python/import.c b/Python/import.c index a9282dde633959..8cc8d3a503bffa 100644 --- a/Python/import.c +++ b/Python/import.c @@ -594,7 +594,8 @@ _PyImport_ClearModulesByIndex(PyInterpreterState *interp) if (PyList_SetSlice(MODULES_BY_INDEX(interp), 
0, PyList_GET_SIZE(MODULES_BY_INDEX(interp)), NULL)) { - PyErr_FormatUnraisable("Exception ignored on clearing interpreters module list"); + PyErr_FormatUnraisable("Exception ignored while " + "clearing interpreters module list"); } } @@ -4080,13 +4081,15 @@ _PyImport_FiniCore(PyInterpreterState *interp) int verbose = _PyInterpreterState_GetConfig(interp)->verbose; if (_PySys_ClearAttrString(interp, "meta_path", verbose) < 0) { - PyErr_FormatUnraisable("Exception ignored on clearing sys.meta_path"); + PyErr_FormatUnraisable("Exception ignored while " + "clearing sys.meta_path"); } // XXX Pull in most of finalize_modules() in pylifecycle.c. if (_PySys_ClearAttrString(interp, "modules", verbose) < 0) { - PyErr_FormatUnraisable("Exception ignored on clearing sys.modules"); + PyErr_FormatUnraisable("Exception ignored while " + "clearing sys.modules"); } _PyImport_ClearCore(interp); @@ -4111,7 +4114,7 @@ init_zipimport(PyThreadState *tstate, int verbose) PySys_WriteStderr("# installing zipimport hook\n"); } - PyObject *zipimporter = _PyImport_GetModuleAttrString("zipimport", "zipimporter"); + PyObject *zipimporter = PyImport_ImportModuleAttrString("zipimport", "zipimporter"); if (zipimporter == NULL) { _PyErr_Clear(tstate); /* No zipimporter object -- okay */ if (verbose) { @@ -4161,10 +4164,12 @@ _PyImport_FiniExternal(PyInterpreterState *interp) // XXX Uninstall importlib metapath importers here? if (_PySys_ClearAttrString(interp, "path_importer_cache", verbose) < 0) { - PyErr_FormatUnraisable("Exception ignored on clearing sys.path_importer_cache"); + PyErr_FormatUnraisable("Exception ignored while " + "clearing sys.path_importer_cache"); } if (_PySys_ClearAttrString(interp, "path_hooks", verbose) < 0) { - PyErr_FormatUnraisable("Exception ignored on clearing sys.path_hooks"); + PyErr_FormatUnraisable("Exception ignored while " + "clearing sys.path_hooks"); } } @@ -4174,7 +4179,7 @@ _PyImport_FiniExternal(PyInterpreterState *interp) /******************/ PyObject * -_PyImport_GetModuleAttr(PyObject *modname, PyObject *attrname) +PyImport_ImportModuleAttr(PyObject *modname, PyObject *attrname) { PyObject *mod = PyImport_Import(modname); if (mod == NULL) { @@ -4186,7 +4191,7 @@ _PyImport_GetModuleAttr(PyObject *modname, PyObject *attrname) } PyObject * -_PyImport_GetModuleAttrString(const char *modname, const char *attrname) +PyImport_ImportModuleAttrString(const char *modname, const char *attrname) { PyObject *pmodname = PyUnicode_FromString(modname); if (pmodname == NULL) { @@ -4197,7 +4202,7 @@ _PyImport_GetModuleAttrString(const char *modname, const char *attrname) Py_DECREF(pmodname); return NULL; } - PyObject *result = _PyImport_GetModuleAttr(pmodname, pattrname); + PyObject *result = PyImport_ImportModuleAttr(pmodname, pattrname); Py_DECREF(pattrname); Py_DECREF(pmodname); return result; @@ -4688,7 +4693,7 @@ _imp_create_dynamic_impl(PyObject *module, PyObject *spec, PyObject *file) * code relies on fp still being open. 
*/ FILE *fp; if (file != NULL) { - fp = _Py_fopen_obj(info.filename, "r"); + fp = Py_fopen(info.filename, "r"); if (fp == NULL) { goto finally; } diff --git a/Python/initconfig.c b/Python/initconfig.c index 7851b86db1f6d0..4db77ef47d2362 100644 --- a/Python/initconfig.c +++ b/Python/initconfig.c @@ -169,7 +169,7 @@ static const PyConfigSpec PYCONFIG_SPEC[] = { SPEC(use_frozen_modules, BOOL, READ_ONLY, NO_SYS), SPEC(use_hash_seed, BOOL, READ_ONLY, NO_SYS), #ifdef __APPLE__ - SPEC(use_system_logger, BOOL, PUBLIC, NO_SYS), + SPEC(use_system_logger, BOOL, READ_ONLY, NO_SYS), #endif SPEC(user_site_directory, BOOL, READ_ONLY, NO_SYS), // sys.flags.no_user_site SPEC(warn_default_encoding, BOOL, READ_ONLY, NO_SYS), diff --git a/Python/instrumentation.c b/Python/instrumentation.c index e4255bfad8c41a..0e7b4810726434 100644 --- a/Python/instrumentation.c +++ b/Python/instrumentation.c @@ -14,6 +14,7 @@ #include "pycore_namespace.h" #include "pycore_object.h" #include "pycore_opcode_metadata.h" // IS_VALID_OPCODE, _PyOpcode_Caches +#include "pycore_opcode_utils.h" // IS_CONDITIONAL_JUMP_OPCODE #include "pycore_pyatomic_ft_wrappers.h" // FT_ATOMIC_STORE_UINTPTR_RELEASE #include "pycore_pyerrors.h" #include "pycore_pystate.h" // _PyInterpreterState_GET() @@ -52,7 +53,7 @@ if (bc == NULL) { \ continue; \ } \ - (func)((_Py_CODEUNIT *)bc, __VA_ARGS__); \ + (func)(code, (_Py_CODEUNIT *)bc, __VA_ARGS__); \ } \ } while (0) @@ -61,7 +62,7 @@ #define LOCK_CODE(code) #define UNLOCK_CODE() #define MODIFY_BYTECODE(code, func, ...) \ - (func)(_PyCode_CODE(code), __VA_ARGS__) + (func)(code, _PyCode_CODE(code), __VA_ARGS__) #endif @@ -95,8 +96,10 @@ static const int8_t EVENT_FOR_OPCODE[256] = { [INSTRUMENTED_POP_JUMP_IF_TRUE] = PY_MONITORING_EVENT_BRANCH_RIGHT, [INSTRUMENTED_POP_JUMP_IF_NONE] = PY_MONITORING_EVENT_BRANCH_RIGHT, [INSTRUMENTED_POP_JUMP_IF_NOT_NONE] = PY_MONITORING_EVENT_BRANCH_RIGHT, - [FOR_ITER] = PY_MONITORING_EVENT_BRANCH_RIGHT, - [INSTRUMENTED_FOR_ITER] = PY_MONITORING_EVENT_BRANCH_RIGHT, + [FOR_ITER] = PY_MONITORING_EVENT_BRANCH_LEFT, + [INSTRUMENTED_FOR_ITER] = PY_MONITORING_EVENT_BRANCH_LEFT, + [POP_ITER] = PY_MONITORING_EVENT_BRANCH_RIGHT, + [INSTRUMENTED_POP_ITER] = PY_MONITORING_EVENT_BRANCH_RIGHT, [END_FOR] = PY_MONITORING_EVENT_STOP_ITERATION, [INSTRUMENTED_END_FOR] = PY_MONITORING_EVENT_STOP_ITERATION, [END_SEND] = PY_MONITORING_EVENT_STOP_ITERATION, @@ -119,6 +122,7 @@ static const uint8_t DE_INSTRUMENT[256] = { [INSTRUMENTED_POP_JUMP_IF_NONE] = POP_JUMP_IF_NONE, [INSTRUMENTED_POP_JUMP_IF_NOT_NONE] = POP_JUMP_IF_NOT_NONE, [INSTRUMENTED_FOR_ITER] = FOR_ITER, + [INSTRUMENTED_POP_ITER] = POP_ITER, [INSTRUMENTED_END_FOR] = END_FOR, [INSTRUMENTED_END_SEND] = END_SEND, [INSTRUMENTED_LOAD_SUPER_ATTR] = LOAD_SUPER_ATTR, @@ -156,6 +160,8 @@ static const uint8_t INSTRUMENTED_OPCODES[256] = { [INSTRUMENTED_END_SEND] = INSTRUMENTED_END_SEND, [FOR_ITER] = INSTRUMENTED_FOR_ITER, [INSTRUMENTED_FOR_ITER] = INSTRUMENTED_FOR_ITER, + [POP_ITER] = INSTRUMENTED_POP_ITER, + [INSTRUMENTED_POP_ITER] = INSTRUMENTED_POP_ITER, [LOAD_SUPER_ATTR] = INSTRUMENTED_LOAD_SUPER_ATTR, [INSTRUMENTED_LOAD_SUPER_ATTR] = INSTRUMENTED_LOAD_SUPER_ATTR, [NOT_TAKEN] = INSTRUMENTED_NOT_TAKEN, @@ -279,48 +285,36 @@ get_events(_Py_GlobalMonitors *m, int tool_id) return result; } -/* Line delta. - * 8 bit value. 
- * if line_delta == -128: - * line = None # represented as -1 - * elif line_delta == -127 or line_delta == -126: - * line = PyCode_Addr2Line(code, offset * sizeof(_Py_CODEUNIT)); +/* Module code can have line 0, even though modules start at line 1, + * so -1 is a legal delta. */ +#define NO_LINE (-2) + +/* Returns the line delta. Defined as: + * if line is None: + * line_delta = NO_LINE * else: - * line = first_line + (offset >> OFFSET_SHIFT) + line_delta; + * line_delta = line - first_line */ - -#define NO_LINE -128 -#define COMPUTED_LINE_LINENO_CHANGE -127 -#define COMPUTED_LINE -126 - -#define OFFSET_SHIFT 4 - -static int8_t -compute_line_delta(PyCodeObject *code, int offset, int line) +static int +compute_line_delta(PyCodeObject *code, int line) { if (line < 0) { + assert(line == -1); return NO_LINE; } - int delta = line - code->co_firstlineno - (offset >> OFFSET_SHIFT); - if (delta <= INT8_MAX && delta > COMPUTED_LINE) { - return delta; - } - return COMPUTED_LINE; + int delta = line - code->co_firstlineno; + assert(delta > NO_LINE); + return delta; } static int -compute_line(PyCodeObject *code, int offset, int8_t line_delta) +compute_line(PyCodeObject *code, int line_delta) { - if (line_delta > COMPUTED_LINE) { - return code->co_firstlineno + (offset >> OFFSET_SHIFT) + line_delta; - } if (line_delta == NO_LINE) { - return -1; } - assert(line_delta == COMPUTED_LINE || line_delta == COMPUTED_LINE_LINENO_CHANGE); - /* Look it up */ - return PyCode_Addr2Line(code, offset * sizeof(_Py_CODEUNIT)); + assert(line_delta > NO_LINE); + return code->co_firstlineno + line_delta; } int @@ -332,6 +326,57 @@ _PyInstruction_GetLength(PyCodeObject *code, int offset) return 1 + _PyOpcode_Caches[inst.op.code]; } +static inline uint8_t +get_original_opcode(_PyCoLineInstrumentationData *line_data, int index) +{ + return line_data->data[index*line_data->bytes_per_entry]; +} + +static inline uint8_t * +get_original_opcode_ptr(_PyCoLineInstrumentationData *line_data, int index) +{ + return &line_data->data[index*line_data->bytes_per_entry]; +} + +static inline void +set_original_opcode(_PyCoLineInstrumentationData *line_data, int index, uint8_t opcode) +{ + line_data->data[index*line_data->bytes_per_entry] = opcode; +} + +static inline int +get_line_delta(_PyCoLineInstrumentationData *line_data, int index) +{ + uint8_t *ptr = &line_data->data[index*line_data->bytes_per_entry+1]; + assert(line_data->bytes_per_entry >= 2); + uint32_t value = *ptr; + for (int idx = 2; idx < line_data->bytes_per_entry; idx++) { + ptr++; + int shift = (idx-1)*8; + value |= ((uint32_t)(*ptr)) << shift; + } + assert(value < INT_MAX); + /* NO_LINE is stored as zero. 
*/ + return ((int)value) + NO_LINE; +} + +static inline void +set_line_delta(_PyCoLineInstrumentationData *line_data, int index, int line_delta) +{ + /* Store line_delta + 2 as we need -2 to represent no line number */ + assert(line_delta >= NO_LINE); + uint32_t adjusted = line_delta - NO_LINE; + uint8_t *ptr = &line_data->data[index*line_data->bytes_per_entry+1]; + assert(adjusted < (1ULL << ((line_data->bytes_per_entry-1)*8))); + assert(line_data->bytes_per_entry >= 2); + *ptr = adjusted & 0xff; + for (int idx = 2; idx < line_data->bytes_per_entry; idx++) { + ptr++; + adjusted >>= 8; + *ptr = adjusted & 0xff; + } +} + #ifdef INSTRUMENT_DEBUG static void @@ -351,11 +396,15 @@ dump_instrumentation_data_lines(PyCodeObject *code, _PyCoLineInstrumentationData if (lines == NULL) { fprintf(out, ", lines = NULL"); } - else if (lines[i].original_opcode == 0) { - fprintf(out, ", lines = {original_opcode = No LINE (0), line_delta = %d)", lines[i].line_delta); - } else { - fprintf(out, ", lines = {original_opcode = %s, line_delta = %d)", _PyOpcode_OpName[lines[i].original_opcode], lines[i].line_delta); + int opcode = get_original_opcode(lines, i); + int line_delta = get_line_delta(lines, i); + if (opcode == 0) { + fprintf(out, ", lines = {original_opcode = No LINE (0), line_delta = %d)", line_delta); + } + else { + fprintf(out, ", lines = {original_opcode = %s, line_delta = %d)", _PyOpcode_OpName[opcode], line_delta); + } } } @@ -405,6 +454,12 @@ dump_local_monitors(const char *prefix, _Py_LocalMonitors monitors, FILE*out) } } +/** NOTE: + * Do not use PyCode_Addr2Line to determine the line number in instrumentation, + * as `PyCode_Addr2Line` uses the monitoring data if it is available. + */ + + /* No error checking -- Don't use this for anything but experimental debugging */ static void dump_instrumentation_data(PyCodeObject *code, int star, FILE*out) @@ -422,6 +477,8 @@ dump_instrumentation_data(PyCodeObject *code, int star, FILE*out) dump_local_monitors("Active", data->active_monitors, out); int code_len = (int)Py_SIZE(code); bool starred = false; + PyCodeAddressRange range; + _PyCode_InitAddressRange(code, &range); for (int i = 0; i < code_len; i += _PyInstruction_GetLength(code, i)) { _Py_CODEUNIT *instr = &_PyCode_CODE(code)[i]; int opcode = instr->op.code; @@ -429,7 +486,7 @@ dump_instrumentation_data(PyCodeObject *code, int star, FILE*out) fprintf(out, "** "); starred = true; } - fprintf(out, "Offset: %d, line: %d %s: ", i, PyCode_Addr2Line(code, i*2), _PyOpcode_OpName[opcode]); + fprintf(out, "Offset: %d, line: %d %s: ", i, _PyCode_CheckLineNumber(i*2, &range), _PyOpcode_OpName[opcode]); dump_instrumentation_data_tools(code, data->tools, i, out); dump_instrumentation_data_lines(code, data->lines, i, out); dump_instrumentation_data_line_tools(code, data->line_tools, i, out); @@ -494,10 +551,12 @@ sanity_check_instrumentation(PyCodeObject *code) code->_co_monitoring->active_monitors, active_monitors)); int code_len = (int)Py_SIZE(code); + PyCodeAddressRange range; + _PyCode_InitAddressRange(co, &range); for (int i = 0; i < code_len;) { _Py_CODEUNIT *instr = &_PyCode_CODE(code)[i]; int opcode = instr->op.code; - int base_opcode = _Py_GetBaseCodeUnit(code, offset).op.code; + int base_opcode = _Py_GetBaseCodeUnit(code, i).op.code; CHECK(valid_opcode(opcode)); CHECK(valid_opcode(base_opcode)); if (opcode == INSTRUMENTED_INSTRUCTION) { @@ -508,8 +567,8 @@ sanity_check_instrumentation(PyCodeObject *code) } if (opcode == INSTRUMENTED_LINE) { CHECK(data->lines); - 
CHECK(valid_opcode(data->lines[i].original_opcode)); - opcode = data->lines[i].original_opcode; + opcode = get_original_opcode(data->lines, i); + CHECK(valid_opcode(opcode)); CHECK(opcode != END_FOR); CHECK(opcode != RESUME); CHECK(opcode != RESUME_CHECK); @@ -524,7 +583,7 @@ sanity_check_instrumentation(PyCodeObject *code) * *and* we are executing a INSTRUMENTED_LINE instruction * that has de-instrumented itself, then we will execute * an invalid INSTRUMENTED_INSTRUCTION */ - CHECK(data->lines[i].original_opcode != INSTRUMENTED_INSTRUCTION); + CHECK(get_original_opcode(data->lines, i) != INSTRUMENTED_INSTRUCTION); } if (opcode == INSTRUMENTED_INSTRUCTION) { CHECK(data->per_instruction_opcodes[i] != 0); @@ -539,9 +598,9 @@ sanity_check_instrumentation(PyCodeObject *code) } CHECK(active_monitors.tools[event] != 0); } - if (data->lines && base_opcode != END_FOR) { - int line1 = compute_line(code, i, data->lines[i].line_delta); - int line2 = PyCode_Addr2Line(code, i*sizeof(_Py_CODEUNIT)); + if (data->lines && get_original_opcode(data->lines, i)) { + int line1 = compute_line(code, get_line_delta(data->lines, i)); + int line2 = _PyCode_CheckLineNumber(i*sizeof(_Py_CODEUNIT), &range); CHECK(line1 == line2); } CHECK(valid_opcode(opcode)); @@ -591,7 +650,7 @@ _Py_GetBaseCodeUnit(PyCodeObject *code, int i) return inst; } if (opcode == INSTRUMENTED_LINE) { - opcode = code->_co_monitoring->lines[i].original_opcode; + opcode = get_original_opcode(code->_co_monitoring->lines, i); } if (opcode == INSTRUMENTED_INSTRUCTION) { opcode = code->_co_monitoring->per_instruction_opcodes[i]; @@ -610,7 +669,7 @@ _Py_GetBaseCodeUnit(PyCodeObject *code, int i) } static void -de_instrument(_Py_CODEUNIT *bytecode, _PyCoMonitoringData *monitoring, int i, +de_instrument(PyCodeObject *code, _Py_CODEUNIT *bytecode, _PyCoMonitoringData *monitoring, int i, int event) { assert(event != PY_MONITORING_EVENT_INSTRUCTION); @@ -621,7 +680,7 @@ de_instrument(_Py_CODEUNIT *bytecode, _PyCoMonitoringData *monitoring, int i, int opcode = *opcode_ptr; assert(opcode != ENTER_EXECUTOR); if (opcode == INSTRUMENTED_LINE) { - opcode_ptr = &monitoring->lines[i].original_opcode; + opcode_ptr = get_original_opcode_ptr(monitoring->lines, i); opcode = *opcode_ptr; } if (opcode == INSTRUMENTED_INSTRUCTION) { @@ -641,7 +700,7 @@ de_instrument(_Py_CODEUNIT *bytecode, _PyCoMonitoringData *monitoring, int i, } static void -de_instrument_line(_Py_CODEUNIT *bytecode, _PyCoMonitoringData *monitoring, +de_instrument_line(PyCodeObject *code, _Py_CODEUNIT *bytecode, _PyCoMonitoringData *monitoring, int i) { _Py_CODEUNIT *instr = &bytecode[i]; @@ -649,10 +708,10 @@ de_instrument_line(_Py_CODEUNIT *bytecode, _PyCoMonitoringData *monitoring, if (opcode != INSTRUMENTED_LINE) { return; } - _PyCoLineInstrumentationData *lines = &monitoring->lines[i]; - int original_opcode = lines->original_opcode; + _PyCoLineInstrumentationData *lines = monitoring->lines; + int original_opcode = get_original_opcode(lines, i); if (original_opcode == INSTRUMENTED_INSTRUCTION) { - lines->original_opcode = monitoring->per_instruction_opcodes[i]; + set_original_opcode(lines, i, monitoring->per_instruction_opcodes[i]); } CHECK(original_opcode != 0); CHECK(original_opcode == _PyOpcode_Deopt[original_opcode]); @@ -665,14 +724,14 @@ de_instrument_line(_Py_CODEUNIT *bytecode, _PyCoMonitoringData *monitoring, } static void -de_instrument_per_instruction(_Py_CODEUNIT *bytecode, +de_instrument_per_instruction(PyCodeObject *code, _Py_CODEUNIT *bytecode, _PyCoMonitoringData *monitoring, int i) 
{ _Py_CODEUNIT *instr = &bytecode[i]; uint8_t *opcode_ptr = &instr->op.code; int opcode = *opcode_ptr; if (opcode == INSTRUMENTED_LINE) { - opcode_ptr = &monitoring->lines[i].original_opcode; + opcode_ptr = get_original_opcode_ptr(monitoring->lines, i); opcode = *opcode_ptr; } if (opcode != INSTRUMENTED_INSTRUCTION) { @@ -691,14 +750,13 @@ de_instrument_per_instruction(_Py_CODEUNIT *bytecode, } static void -instrument(_Py_CODEUNIT *bytecode, _PyCoMonitoringData *monitoring, int i) +instrument(PyCodeObject *code, _Py_CODEUNIT *bytecode, _PyCoMonitoringData *monitoring, int i) { _Py_CODEUNIT *instr = &bytecode[i]; uint8_t *opcode_ptr = &instr->op.code; int opcode =*opcode_ptr; if (opcode == INSTRUMENTED_LINE) { - _PyCoLineInstrumentationData *lines = &monitoring->lines[i]; - opcode_ptr = &lines->original_opcode; + opcode_ptr = get_original_opcode_ptr(monitoring->lines, i); opcode = *opcode_ptr; } if (opcode == INSTRUMENTED_INSTRUCTION) { @@ -721,29 +779,27 @@ instrument(_Py_CODEUNIT *bytecode, _PyCoMonitoringData *monitoring, int i) } static void -instrument_line(_Py_CODEUNIT *bytecode, _PyCoMonitoringData *monitoring, int i) +instrument_line(PyCodeObject *code, _Py_CODEUNIT *bytecode, _PyCoMonitoringData *monitoring, int i) { uint8_t *opcode_ptr = &bytecode[i].op.code; int opcode = *opcode_ptr; if (opcode == INSTRUMENTED_LINE) { return; } - _PyCoLineInstrumentationData *lines = &monitoring->lines[i]; - lines->original_opcode = _PyOpcode_Deopt[opcode]; - CHECK(lines->original_opcode > 0); + set_original_opcode(monitoring->lines, i, _PyOpcode_Deopt[opcode]); + CHECK(get_line_delta(monitoring->lines, i) > NO_LINE); FT_ATOMIC_STORE_UINT8_RELAXED(*opcode_ptr, INSTRUMENTED_LINE); } static void -instrument_per_instruction(_Py_CODEUNIT *bytecode, +instrument_per_instruction(PyCodeObject *code, _Py_CODEUNIT *bytecode, _PyCoMonitoringData *monitoring, int i) { _Py_CODEUNIT *instr = &bytecode[i]; uint8_t *opcode_ptr = &instr->op.code; int opcode = *opcode_ptr; if (opcode == INSTRUMENTED_LINE) { - _PyCoLineInstrumentationData *lines = &monitoring->lines[i]; - opcode_ptr = &lines->original_opcode; + opcode_ptr = get_original_opcode_ptr(monitoring->lines, i); opcode = *opcode_ptr; } if (opcode == INSTRUMENTED_INSTRUCTION) { @@ -1077,8 +1133,8 @@ static const char *const event_names [] = { static int call_instrumentation_vector( - PyThreadState *tstate, int event, - _PyInterpreterFrame *frame, _Py_CODEUNIT *instr, Py_ssize_t nargs, PyObject *args[]) + _Py_CODEUNIT *instr, PyThreadState *tstate, int event, + _PyInterpreterFrame *frame, _Py_CODEUNIT *arg2, Py_ssize_t nargs, PyObject *args[]) { if (tstate->tracing) { return 0; @@ -1091,17 +1147,13 @@ call_instrumentation_vector( int offset = (int)(instr - _PyFrame_GetBytecode(frame)); /* Offset visible to user should be the offset in bytes, as that is the * convention for APIs involving code offsets. 
*/ - int bytes_offset = offset * (int)sizeof(_Py_CODEUNIT); - if (event == PY_MONITORING_EVENT_BRANCH_LEFT) { - assert(EVENT_FOR_OPCODE[_Py_GetBaseCodeUnit(code, offset-2).op.code] == PY_MONITORING_EVENT_BRANCH_RIGHT); - bytes_offset -= 4; - } - PyObject *offset_obj = PyLong_FromLong(bytes_offset); - if (offset_obj == NULL) { + int bytes_arg2 = (int)(arg2 - _PyFrame_GetBytecode(frame)) * (int)sizeof(_Py_CODEUNIT); + PyObject *arg2_obj = PyLong_FromLong(bytes_arg2); + if (arg2_obj == NULL) { return -1; } assert(args[2] == NULL); - args[2] = offset_obj; + args[2] = arg2_obj; PyInterpreterState *interp = tstate->interp; uint8_t tools = get_tools_for_instruction(code, interp, offset, event); size_t nargsf = (size_t) nargs | PY_VECTORCALL_ARGUMENTS_OFFSET; @@ -1139,7 +1191,7 @@ call_instrumentation_vector( } } } - Py_DECREF(offset_obj); + Py_DECREF(arg2_obj); return err; } @@ -1149,7 +1201,7 @@ _Py_call_instrumentation( _PyInterpreterFrame *frame, _Py_CODEUNIT *instr) { PyObject *args[3] = { NULL, NULL, NULL }; - return call_instrumentation_vector(tstate, event, frame, instr, 2, args); + return call_instrumentation_vector(instr, tstate, event, frame, instr, 2, args); } int @@ -1158,7 +1210,7 @@ _Py_call_instrumentation_arg( _PyInterpreterFrame *frame, _Py_CODEUNIT *instr, PyObject *arg) { PyObject *args[4] = { NULL, NULL, NULL, arg }; - return call_instrumentation_vector(tstate, event, frame, instr, 3, args); + return call_instrumentation_vector(instr, tstate, event, frame, instr, 3, args); } int @@ -1167,34 +1219,34 @@ _Py_call_instrumentation_2args( _PyInterpreterFrame *frame, _Py_CODEUNIT *instr, PyObject *arg0, PyObject *arg1) { PyObject *args[5] = { NULL, NULL, NULL, arg0, arg1 }; - return call_instrumentation_vector(tstate, event, frame, instr, 4, args); + return call_instrumentation_vector(instr, tstate, event, frame, instr, 4, args); } _Py_CODEUNIT * _Py_call_instrumentation_jump( - PyThreadState *tstate, int event, - _PyInterpreterFrame *frame, _Py_CODEUNIT *instr, _Py_CODEUNIT *target) + _Py_CODEUNIT *instr, PyThreadState *tstate, int event, + _PyInterpreterFrame *frame, _Py_CODEUNIT *src, _Py_CODEUNIT *dest) { assert(event == PY_MONITORING_EVENT_JUMP || event == PY_MONITORING_EVENT_BRANCH_RIGHT || event == PY_MONITORING_EVENT_BRANCH_LEFT); - assert(frame->instr_ptr == instr); - int to = (int)(target - _PyFrame_GetBytecode(frame)); + int to = (int)(dest - _PyFrame_GetBytecode(frame)); PyObject *to_obj = PyLong_FromLong(to * (int)sizeof(_Py_CODEUNIT)); if (to_obj == NULL) { return NULL; } PyObject *args[4] = { NULL, NULL, NULL, to_obj }; - int err = call_instrumentation_vector(tstate, event, frame, instr, 3, args); + _Py_CODEUNIT *instr_ptr = frame->instr_ptr; + int err = call_instrumentation_vector(instr, tstate, event, frame, src, 3, args); Py_DECREF(to_obj); if (err) { return NULL; } - if (frame->instr_ptr != instr) { + if (frame->instr_ptr != instr_ptr) { /* The callback has caused a jump (by setting the line number) */ return frame->instr_ptr; } - return target; + return dest; } static void @@ -1204,7 +1256,7 @@ call_instrumentation_vector_protected( { assert(_PyErr_Occurred(tstate)); PyObject *exc = _PyErr_GetRaisedException(tstate); - int err = call_instrumentation_vector(tstate, event, frame, instr, nargs, args); + int err = call_instrumentation_vector(instr, tstate, event, frame, instr, nargs, args); if (err) { Py_XDECREF(exc); } @@ -1224,18 +1276,16 @@ _Py_call_instrumentation_exc2( call_instrumentation_vector_protected(tstate, event, frame, instr, 4, args); } - int 
_Py_Instrumentation_GetLine(PyCodeObject *code, int index) { _PyCoMonitoringData *monitoring = code->_co_monitoring; assert(monitoring != NULL); assert(monitoring->lines != NULL); - assert(index >= code->_co_firsttraceable); assert(index < Py_SIZE(code)); - _PyCoLineInstrumentationData *line_data = &monitoring->lines[index]; - int8_t line_delta = line_data->line_delta; - int line = compute_line(code, index, line_delta); + _PyCoLineInstrumentationData *line_data = monitoring->lines; + int line_delta = get_line_delta(line_data, index); + int line = compute_line(code, line_delta); return line; } @@ -1249,29 +1299,20 @@ _Py_call_instrumentation_line(PyThreadState *tstate, _PyInterpreterFrame* frame, int i = (int)(instr - bytecode); _PyCoMonitoringData *monitoring = code->_co_monitoring; - _PyCoLineInstrumentationData *line_data = &monitoring->lines[i]; + _PyCoLineInstrumentationData *line_data = monitoring->lines; PyInterpreterState *interp = tstate->interp; - int8_t line_delta = line_data->line_delta; - int line = 0; - - if (line_delta == COMPUTED_LINE_LINENO_CHANGE) { - // We know the line number must have changed, don't need to calculate - // the line number for now because we might not need it. - line = -1; - } else { - line = compute_line(code, i, line_delta); - assert(line >= 0); - assert(prev != NULL); - int prev_index = (int)(prev - bytecode); - int prev_line = _Py_Instrumentation_GetLine(code, prev_index); - if (prev_line == line) { - int prev_opcode = bytecode[prev_index].op.code; - /* RESUME and INSTRUMENTED_RESUME are needed for the operation of - * instrumentation, so must never be hidden by an INSTRUMENTED_LINE. - */ - if (prev_opcode != RESUME && prev_opcode != INSTRUMENTED_RESUME) { - goto done; - } + int line = _Py_Instrumentation_GetLine(code, i); + assert(line >= 0); + assert(prev != NULL); + int prev_index = (int)(prev - bytecode); + int prev_line = _Py_Instrumentation_GetLine(code, prev_index); + if (prev_line == line) { + int prev_opcode = bytecode[prev_index].op.code; + /* RESUME and INSTRUMENTED_RESUME are needed for the operation of + * instrumentation, so must never be hidden by an INSTRUMENTED_LINE. + */ + if (prev_opcode != RESUME && prev_opcode != INSTRUMENTED_RESUME) { + goto done; } } @@ -1296,12 +1337,6 @@ _Py_call_instrumentation_line(PyThreadState *tstate, _PyInterpreterFrame* frame, tstate->tracing++; /* Call c_tracefunc directly, having set the line number. */ Py_INCREF(frame_obj); - if (line == -1 && line_delta > COMPUTED_LINE) { - /* Only assign f_lineno if it's easy to calculate, otherwise - * do lazy calculation by setting the f_lineno to 0. 
- */ - line = compute_line(code, i, line_delta); - } frame_obj->f_lineno = line; int err = tstate->c_tracefunc(tstate->c_traceobj, frame_obj, PyTrace_LINE, Py_None); frame_obj->f_lineno = 0; @@ -1318,11 +1353,6 @@ _Py_call_instrumentation_line(PyThreadState *tstate, _PyInterpreterFrame* frame, if (tools == 0) { goto done; } - - if (line == -1) { - /* Need to calculate the line number now for monitoring events */ - line = compute_line(code, i, line_delta); - } PyObject *line_obj = PyLong_FromLong(line); if (line_obj == NULL) { return -1; @@ -1354,7 +1384,7 @@ _Py_call_instrumentation_line(PyThreadState *tstate, _PyInterpreterFrame* frame, Py_DECREF(line_obj); uint8_t original_opcode; done: - original_opcode = line_data->original_opcode; + original_opcode = get_original_opcode(line_data, i); assert(original_opcode != 0); assert(original_opcode != INSTRUMENTED_LINE); assert(_PyOpcode_Deopt[original_opcode] == original_opcode); @@ -1426,7 +1456,7 @@ initialize_tools(PyCodeObject *code) int opcode = instr->op.code; assert(opcode != ENTER_EXECUTOR); if (opcode == INSTRUMENTED_LINE) { - opcode = code->_co_monitoring->lines[i].original_opcode; + opcode = get_original_opcode(code->_co_monitoring->lines, i); } if (opcode == INSTRUMENTED_INSTRUCTION) { opcode = code->_co_monitoring->per_instruction_opcodes[i]; @@ -1469,63 +1499,58 @@ initialize_tools(PyCodeObject *code) } } -#define NO_LINE -128 - static void -initialize_lines(PyCodeObject *code) +initialize_lines(PyCodeObject *code, int bytes_per_entry) { ASSERT_WORLD_STOPPED_OR_LOCKED(code); _PyCoLineInstrumentationData *line_data = code->_co_monitoring->lines; assert(line_data != NULL); + line_data->bytes_per_entry = bytes_per_entry; int code_len = (int)Py_SIZE(code); PyCodeAddressRange range; _PyCode_InitAddressRange(code, &range); - for (int i = 0; i < code->_co_firsttraceable && i < code_len; i++) { - line_data[i].original_opcode = 0; - line_data[i].line_delta = -127; - } int current_line = -1; - for (int i = code->_co_firsttraceable; i < code_len; ) { + for (int i = 0; i < code_len; ) { int opcode = _Py_GetBaseCodeUnit(code, i).op.code; int line = _PyCode_CheckLineNumber(i*(int)sizeof(_Py_CODEUNIT), &range); - line_data[i].line_delta = compute_line_delta(code, i, line); + set_line_delta(line_data, i, compute_line_delta(code, line)); int length = _PyInstruction_GetLength(code, i); - switch (opcode) { - case END_ASYNC_FOR: - case END_FOR: - case END_SEND: - case RESUME: - /* END_FOR cannot start a line, as it is skipped by FOR_ITER - * END_SEND cannot start a line, as it is skipped by SEND - * RESUME must not be instrumented with INSTRUMENT_LINE */ - line_data[i].original_opcode = 0; - break; - default: - /* Set original_opcode to the opcode iff the instruction - * starts a line, and thus should be instrumented. - * This saves having to perform this check every time the - * we turn instrumentation on or off, and serves as a sanity - * check when debugging. - */ - if (line != current_line && line >= 0) { - line_data[i].original_opcode = opcode; - if (line_data[i].line_delta == COMPUTED_LINE) { - /* Label this line as a line with a line number change - * which could help the monitoring callback to quickly - * identify the line number change. 
- */ - line_data[i].line_delta = COMPUTED_LINE_LINENO_CHANGE; + if (i < code->_co_firsttraceable) { + set_original_opcode(line_data, i, 0); + } + else { + switch (opcode) { + case END_ASYNC_FOR: + case END_FOR: + case END_SEND: + case RESUME: + case POP_ITER: + /* END_FOR cannot start a line, as it is skipped by FOR_ITER + * END_SEND cannot start a line, as it is skipped by SEND + * RESUME and POP_ITER must not be instrumented with INSTRUMENTED_LINE */ + set_original_opcode(line_data, i, 0); + break; + default: + /* Set original_opcode to the opcode iff the instruction + * starts a line, and thus should be instrumented. + * This saves having to perform this check every time the + * we turn instrumentation on or off, and serves as a sanity + * check when debugging. + */ + if (line != current_line && line >= 0) { + set_original_opcode(line_data, i, opcode); + CHECK(get_line_delta(line_data, i) != NO_LINE); } - } - else { - line_data[i].original_opcode = 0; - } - current_line = line; + else { + set_original_opcode(line_data, i, 0); + } + current_line = line; + } } for (int j = 1; j < length; j++) { - line_data[i+j].original_opcode = 0; - line_data[i+j].line_delta = NO_LINE; + set_original_opcode(line_data, i+j, 0); + set_line_delta(line_data, i+j, NO_LINE); } i += length; } @@ -1569,12 +1594,10 @@ initialize_lines(PyCodeObject *code) continue; } assert(target >= 0); - if (line_data[target].line_delta != NO_LINE) { - line_data[target].original_opcode = _Py_GetBaseCodeUnit(code, target).op.code; - if (line_data[target].line_delta == COMPUTED_LINE_LINENO_CHANGE) { - // If the line is a jump target, we are not sure if the line - // number changes, so we set it to COMPUTED_LINE. - line_data[target].line_delta = COMPUTED_LINE; + if (get_line_delta(line_data, target) != NO_LINE) { + int opcode = _Py_GetBaseCodeUnit(code, target).op.code; + if (opcode != POP_ITER) { + set_original_opcode(line_data, target, opcode); } } } @@ -1597,9 +1620,8 @@ initialize_lines(PyCodeObject *code) * END_ASYNC_FOR is a bit special as it marks the end of * an `async for` loop, which should not generate its own * line event. 
*/ - if (line_data[handler].line_delta != NO_LINE && - original_opcode != END_ASYNC_FOR) { - line_data[handler].original_opcode = original_opcode; + if (get_line_delta(line_data, handler) != NO_LINE && original_opcode != END_ASYNC_FOR) { + set_original_opcode(line_data, handler, original_opcode); } } } @@ -1672,12 +1694,39 @@ update_instrumentation_data(PyCodeObject *code, PyInterpreterState *interp) } if (all_events.tools[PY_MONITORING_EVENT_LINE]) { if (code->_co_monitoring->lines == NULL) { - code->_co_monitoring->lines = PyMem_Malloc(code_len * sizeof(_PyCoLineInstrumentationData)); + PyCodeAddressRange range; + _PyCode_InitAddressRange(code, &range); + int max_line = code->co_firstlineno + 1; + _PyCode_InitAddressRange(code, &range); + for (int i = code->_co_firsttraceable; i < code_len; ) { + int line = _PyCode_CheckLineNumber(i*(int)sizeof(_Py_CODEUNIT), &range); + if (line > max_line) { + max_line = line; + } + int length = _PyInstruction_GetLength(code, i); + i += length; + } + int bytes_per_entry; + int max_delta = max_line - code->co_firstlineno; + /* We store delta+2 in the table, so 253 is max for one byte */ + if (max_delta < 256+NO_LINE) { + bytes_per_entry = 2; + } + else if (max_delta < (1 << 16)+NO_LINE) { + bytes_per_entry = 3; + } + else if (max_delta < (1 << 24)+NO_LINE) { + bytes_per_entry = 4; + } + else { + bytes_per_entry = 5; + } + code->_co_monitoring->lines = PyMem_Malloc(1 + code_len * bytes_per_entry); if (code->_co_monitoring->lines == NULL) { PyErr_NoMemory(); return -1; } - initialize_lines(code); + initialize_lines(code, bytes_per_entry); } if (multitools && code->_co_monitoring->line_tools == NULL) { code->_co_monitoring->line_tools = PyMem_Malloc(code_len); @@ -1792,7 +1841,7 @@ force_instrument_lock_held(PyCodeObject *code, PyInterpreterState *interp) if (removed_line_tools) { _PyCoLineInstrumentationData *line_data = code->_co_monitoring->lines; for (int i = code->_co_firsttraceable; i < code_len;) { - if (line_data[i].original_opcode) { + if (get_original_opcode(line_data, i)) { remove_line_tools(code, i, removed_line_tools); } i += _PyInstruction_GetLength(code, i); @@ -1819,7 +1868,7 @@ force_instrument_lock_held(PyCodeObject *code, PyInterpreterState *interp) if (new_line_tools) { _PyCoLineInstrumentationData *line_data = code->_co_monitoring->lines; for (int i = code->_co_firsttraceable; i < code_len;) { - if (line_data[i].original_opcode) { + if (get_original_opcode(line_data, i)) { add_line_tools(code, i, new_line_tools); } i += _PyInstruction_GetLength(code, i); @@ -1892,7 +1941,7 @@ instrument_all_executing_code_objects(PyInterpreterState *interp) { while (ts) { _PyInterpreterFrame *frame = ts->current_frame; while (frame) { - if (frame->owner != FRAME_OWNED_BY_CSTACK) { + if (frame->owner < FRAME_OWNED_BY_INTERPRETER) { if (instrument_lock_held(_PyFrame_GetCode(frame), interp)) { return -1; } @@ -2887,30 +2936,52 @@ branch_handler( _PyLegacyBranchEventHandler *self, PyObject *const *args, size_t nargsf, PyObject *kwnames ) { + // Find the other instrumented instruction and remove tool + // The spec (PEP 669) allows spurious events after a DISABLE, + // so a best effort is good enough. 
+ assert(PyVectorcall_NARGS(nargsf) >= 3); + PyCodeObject *code = (PyCodeObject *)args[0]; + int src_offset = PyLong_AsLong(args[1]); + if (PyErr_Occurred()) { + return NULL; + } + _Py_CODEUNIT instr = _PyCode_CODE(code)[src_offset/2]; + if (!is_instrumented(instr.op.code)) { + /* Already disabled */ + return &_PyInstrumentation_DISABLE; + } PyObject *res = PyObject_Vectorcall(self->handler, args, nargsf, kwnames); if (res == &_PyInstrumentation_DISABLE) { - // Find the other instrumented instruction and remove tool - assert(PyVectorcall_NARGS(nargsf) >= 2); - PyObject *offset_obj = args[1]; - int bytes_offset = PyLong_AsLong(offset_obj); - if (PyErr_Occurred()) { - return NULL; - } - PyCodeObject *code = (PyCodeObject *)args[0]; - if (!PyCode_Check(code) || (bytes_offset & 1)) { - return res; - } - int offset = bytes_offset / 2; /* We need FOR_ITER and POP_JUMP_ to be the same size */ assert(INLINE_CACHE_ENTRIES_FOR_ITER == 1); - if (self->right) { - offset += 2; + int offset; + int other_event; + if (instr.op.code == FOR_ITER) { + if (self->right) { + offset = src_offset/2; + other_event = PY_MONITORING_EVENT_BRANCH_LEFT; + } + else { + // We don't know where the POP_ITER is, so + // we cannot de-instrument it. + return res; + } } - if (offset >= Py_SIZE(code)) { + else if (IS_CONDITIONAL_JUMP_OPCODE(instr.op.code)) { + if (self->right) { + offset = src_offset/2 + 2; + other_event = PY_MONITORING_EVENT_BRANCH_LEFT; + assert(_Py_GetBaseCodeUnit(code, offset).op.code == NOT_TAKEN); + } + else { + offset = src_offset/2; + other_event = PY_MONITORING_EVENT_BRANCH_RIGHT; + } + } + else { + // Orphaned NOT_TAKEN -- Jump removed by the compiler return res; } - int other_event = self->right ? - PY_MONITORING_EVENT_BRANCH_LEFT : PY_MONITORING_EVENT_BRANCH_RIGHT; LOCK_CODE(code); remove_tools(code, offset, other_event, 1 << self->tool_id); UNLOCK_CODE(); @@ -3013,15 +3084,30 @@ static PyObject * branchesiter_next(branchesiterator *bi) { int offset = bi->bi_offset; + int oparg = 0; while (offset < Py_SIZE(bi->bi_code)) { _Py_CODEUNIT inst = _Py_GetBaseCodeUnit(bi->bi_code, offset); - int next_offset = offset + _PyInstruction_GetLength(bi->bi_code, offset); - int event = EVENT_FOR_OPCODE[inst.op.code]; - if (event == PY_MONITORING_EVENT_BRANCH_RIGHT) { - /* Skip NOT_TAKEN */ - int not_taken = next_offset + 1; - bi->bi_offset = not_taken; - return int_triple(offset*2, not_taken*2, (next_offset + inst.op.arg)*2); + int next_offset = offset + 1 + _PyOpcode_Caches[inst.op.code]; + switch(inst.op.code) { + case EXTENDED_ARG: + oparg = (oparg << 8) | inst.op.arg; + break; + case FOR_ITER: + oparg = (oparg << 8) | inst.op.arg; + bi->bi_offset = next_offset; + int target = next_offset + oparg+2; // Skips END_FOR and POP_ITER + return int_triple(offset*2, next_offset*2, target*2); + case POP_JUMP_IF_FALSE: + case POP_JUMP_IF_TRUE: + case POP_JUMP_IF_NONE: + case POP_JUMP_IF_NOT_NONE: + oparg = (oparg << 8) | inst.op.arg; + /* Skip NOT_TAKEN */ + int not_taken = next_offset + 1; + bi->bi_offset = not_taken; + return int_triple(offset*2, not_taken*2, (next_offset + oparg)*2); + default: + oparg = 0; } offset = next_offset; } diff --git a/Python/jit.c b/Python/jit.c index 7dd0da7a45055a..092b873bc734e1 100644 --- a/Python/jit.c +++ b/Python/jit.c @@ -87,6 +87,7 @@ jit_free(unsigned char *memory, size_t size) jit_error("unable to free memory"); return -1; } + OPT_STAT_ADD(jit_freed_memory_size, size); return 0; } @@ -501,8 +502,8 @@ _PyJIT_Compile(_PyExecutorObject *executor, const _PyUOpInstruction trace[], siz 
// Round up to the nearest page: size_t page_size = get_page_size(); assert((page_size & (page_size - 1)) == 0); - size_t padding = page_size - ((code_size + data_size + state.trampolines.size) & (page_size - 1)); - size_t total_size = code_size + data_size + state.trampolines.size + padding; + size_t padding = page_size - ((code_size + state.trampolines.size + data_size) & (page_size - 1)); + size_t total_size = code_size + state.trampolines.size + data_size + padding; unsigned char *memory = jit_alloc(total_size); if (memory == NULL) { return -1; @@ -510,14 +511,21 @@ _PyJIT_Compile(_PyExecutorObject *executor, const _PyUOpInstruction trace[], siz #ifdef MAP_JIT pthread_jit_write_protect_np(0); #endif + // Collect memory stats + OPT_STAT_ADD(jit_total_memory_size, total_size); + OPT_STAT_ADD(jit_code_size, code_size); + OPT_STAT_ADD(jit_trampoline_size, state.trampolines.size); + OPT_STAT_ADD(jit_data_size, data_size); + OPT_STAT_ADD(jit_padding_size, padding); + OPT_HIST(total_size, trace_total_memory_hist); // Update the offsets of each instruction: for (size_t i = 0; i < length; i++) { state.instruction_starts[i] += (uintptr_t)memory; } // Loop again to emit the code: unsigned char *code = memory; - unsigned char *data = memory + code_size; - state.trampolines.mem = memory + code_size + data_size; + state.trampolines.mem = memory + code_size; + unsigned char *data = memory + code_size + state.trampolines.size; // Compile the shim, which handles converting between the native // calling convention and the calling convention used by jitted code // (which may be different for efficiency reasons). @@ -539,7 +547,7 @@ _PyJIT_Compile(_PyExecutorObject *executor, const _PyUOpInstruction trace[], siz code += group->code_size; data += group->data_size; assert(code == memory + code_size); - assert(data == memory + code_size + data_size); + assert(data == memory + code_size + state.trampolines.size + data_size); #ifdef MAP_JIT pthread_jit_write_protect_np(1); #endif @@ -563,7 +571,8 @@ _PyJIT_Free(_PyExecutorObject *executor) executor->jit_side_entry = NULL; executor->jit_size = 0; if (jit_free(memory, size)) { - PyErr_WriteUnraisable(NULL); + PyErr_FormatUnraisable("Exception ignored while " + "freeing JIT memory"); } } } diff --git a/Python/legacy_tracing.c b/Python/legacy_tracing.c index 45af275f1f6dce..97634f9183c7d5 100644 --- a/Python/legacy_tracing.c +++ b/Python/legacy_tracing.c @@ -491,8 +491,8 @@ int _PyEval_SetProfile(PyThreadState *tstate, Py_tracefunc func, PyObject *arg) { assert(is_tstate_valid(tstate)); - /* The caller must hold the GIL */ - assert(PyGILState_Check()); + /* The caller must hold a thread state */ + _Py_AssertHoldsTstate(); /* Call _PySys_Audit() in the context of the current thread state, even if tstate is not the current thread state. */ @@ -586,8 +586,8 @@ int _PyEval_SetTrace(PyThreadState *tstate, Py_tracefunc func, PyObject *arg) { assert(is_tstate_valid(tstate)); - /* The caller must hold the GIL */ - assert(PyGILState_Check()); + /* The caller must hold a thread state */ + _Py_AssertHoldsTstate(); /* Call _PySys_Audit() in the context of the current thread state, even if tstate is not the current thread state. 
*/ diff --git a/Python/marshal.c b/Python/marshal.c index 72afa4ff89432c..cf7011652513ae 100644 --- a/Python/marshal.c +++ b/Python/marshal.c @@ -240,10 +240,6 @@ w_short_pstring(const void *s, Py_ssize_t n, WFILE *p) #define PyLong_MARSHAL_SHIFT 15 #define PyLong_MARSHAL_BASE ((short)1 << PyLong_MARSHAL_SHIFT) #define PyLong_MARSHAL_MASK (PyLong_MARSHAL_BASE - 1) -#if PyLong_SHIFT % PyLong_MARSHAL_SHIFT != 0 -#error "PyLong_SHIFT must be a multiple of PyLong_MARSHAL_SHIFT" -#endif -#define PyLong_MARSHAL_RATIO (PyLong_SHIFT / PyLong_MARSHAL_SHIFT) #define W_TYPE(t, p) do { \ w_byte((t) | flag, (p)); \ @@ -252,47 +248,106 @@ w_short_pstring(const void *s, Py_ssize_t n, WFILE *p) static PyObject * _PyMarshal_WriteObjectToString(PyObject *x, int version, int allow_code); +#define _r_digits(bitsize) \ +static void \ +_r_digits##bitsize(const uint ## bitsize ## _t *digits, Py_ssize_t n, \ + uint8_t negative, Py_ssize_t marshal_ratio, WFILE *p) \ +{ \ + /* set l to number of base PyLong_MARSHAL_BASE digits */ \ + Py_ssize_t l = (n - 1)*marshal_ratio; \ + uint ## bitsize ## _t d = digits[n - 1]; \ + \ + assert(marshal_ratio > 0); \ + assert(n >= 1); \ + assert(d != 0); /* a PyLong is always normalized */ \ + do { \ + d >>= PyLong_MARSHAL_SHIFT; \ + l++; \ + } while (d != 0); \ + if (l > SIZE32_MAX) { \ + p->depth--; \ + p->error = WFERR_UNMARSHALLABLE; \ + return; \ + } \ + w_long((long)(negative ? -l : l), p); \ + \ + for (Py_ssize_t i = 0; i < n - 1; i++) { \ + d = digits[i]; \ + for (Py_ssize_t j = 0; j < marshal_ratio; j++) { \ + w_short(d & PyLong_MARSHAL_MASK, p); \ + d >>= PyLong_MARSHAL_SHIFT; \ + } \ + assert(d == 0); \ + } \ + d = digits[n - 1]; \ + do { \ + w_short(d & PyLong_MARSHAL_MASK, p); \ + d >>= PyLong_MARSHAL_SHIFT; \ + } while (d != 0); \ +} +_r_digits(16) +_r_digits(32) +#undef _r_digits + static void w_PyLong(const PyLongObject *ob, char flag, WFILE *p) { - Py_ssize_t i, j, n, l; - digit d; - W_TYPE(TYPE_LONG, p); if (_PyLong_IsZero(ob)) { w_long((long)0, p); return; } - /* set l to number of base PyLong_MARSHAL_BASE digits */ - n = _PyLong_DigitCount(ob); - l = (n-1) * PyLong_MARSHAL_RATIO; - d = ob->long_value.ob_digit[n-1]; - assert(d != 0); /* a PyLong is always normalized */ - do { - d >>= PyLong_MARSHAL_SHIFT; - l++; - } while (d != 0); - if (l > SIZE32_MAX) { + PyLongExport long_export; + + if (PyLong_Export((PyObject *)ob, &long_export) < 0) { p->depth--; p->error = WFERR_UNMARSHALLABLE; return; } - w_long((long)(_PyLong_IsNegative(ob) ? -l : l), p); + if (!long_export.digits) { + int8_t sign = long_export.value < 0 ? 
-1 : 1; + uint64_t abs_value = Py_ABS(long_export.value); + uint64_t d = abs_value; + long l = 0; - for (i=0; i < n-1; i++) { - d = ob->long_value.ob_digit[i]; - for (j=0; j < PyLong_MARSHAL_RATIO; j++) { + /* set l to number of base PyLong_MARSHAL_BASE digits */ + do { + d >>= PyLong_MARSHAL_SHIFT; + l += sign; + } while (d); + w_long(l, p); + + d = abs_value; + do { w_short(d & PyLong_MARSHAL_MASK, p); d >>= PyLong_MARSHAL_SHIFT; - } - assert (d == 0); + } while (d); + return; } - d = ob->long_value.ob_digit[n-1]; - do { - w_short(d & PyLong_MARSHAL_MASK, p); - d >>= PyLong_MARSHAL_SHIFT; - } while (d != 0); + + const PyLongLayout *layout = PyLong_GetNativeLayout(); + Py_ssize_t marshal_ratio = layout->bits_per_digit/PyLong_MARSHAL_SHIFT; + + /* must be a multiple of PyLong_MARSHAL_SHIFT */ + assert(layout->bits_per_digit % PyLong_MARSHAL_SHIFT == 0); + assert(layout->bits_per_digit >= PyLong_MARSHAL_SHIFT); + + /* other assumptions on PyLongObject internals */ + assert(layout->bits_per_digit <= 32); + assert(layout->digits_order == -1); + assert(layout->digit_endianness == (PY_LITTLE_ENDIAN ? -1 : 1)); + assert(layout->digit_size == 2 || layout->digit_size == 4); + + if (layout->digit_size == 4) { + _r_digits32(long_export.digits, long_export.ndigits, + long_export.negative, marshal_ratio, p); + } + else { + _r_digits16(long_export.digits, long_export.ndigits, + long_export.negative, marshal_ratio, p); + } + PyLong_FreeExport(&long_export); } static void @@ -875,17 +930,62 @@ r_long64(RFILE *p) 1 /* signed */); } +#define _w_digits(bitsize) \ +static int \ +_w_digits##bitsize(uint ## bitsize ## _t *digits, Py_ssize_t size, \ + Py_ssize_t marshal_ratio, \ + int shorts_in_top_digit, RFILE *p) \ +{ \ + uint ## bitsize ## _t d; \ + \ + assert(size >= 1); \ + for (Py_ssize_t i = 0; i < size - 1; i++) { \ + d = 0; \ + for (Py_ssize_t j = 0; j < marshal_ratio; j++) { \ + int md = r_short(p); \ + if (md < 0 || md > PyLong_MARSHAL_BASE) { \ + goto bad_digit; \ + } \ + d += (uint ## bitsize ## _t)md << j*PyLong_MARSHAL_SHIFT; \ + } \ + digits[i] = d; \ + } \ + \ + d = 0; \ + for (Py_ssize_t j = 0; j < shorts_in_top_digit; j++) { \ + int md = r_short(p); \ + if (md < 0 || md > PyLong_MARSHAL_BASE) { \ + goto bad_digit; \ + } \ + /* topmost marshal digit should be nonzero */ \ + if (md == 0 && j == shorts_in_top_digit - 1) { \ + PyErr_SetString(PyExc_ValueError, \ + "bad marshal data (unnormalized long data)"); \ + return -1; \ + } \ + d += (uint ## bitsize ## _t)md << j*PyLong_MARSHAL_SHIFT; \ + } \ + assert(!PyErr_Occurred()); \ + /* top digit should be nonzero, else the resulting PyLong won't be \ + normalized */ \ + digits[size - 1] = d; \ + return 0; \ + \ +bad_digit: \ + if (!PyErr_Occurred()) { \ + PyErr_SetString(PyExc_ValueError, \ + "bad marshal data (digit out of range in long)"); \ + } \ + return -1; \ +} +_w_digits(32) +_w_digits(16) +#undef _w_digits + static PyObject * r_PyLong(RFILE *p) { - PyLongObject *ob; - long n, size, i; - int j, md, shorts_in_top_digit; - digit d; - - n = r_long(p); - if (n == 0) - return (PyObject *)_PyLong_New(0); + long n = r_long(p); if (n == -1 && PyErr_Occurred()) { return NULL; } @@ -895,51 +995,44 @@ r_PyLong(RFILE *p) return NULL; } - size = 1 + (Py_ABS(n) - 1) / PyLong_MARSHAL_RATIO; - shorts_in_top_digit = 1 + (Py_ABS(n) - 1) % PyLong_MARSHAL_RATIO; - ob = _PyLong_New(size); - if (ob == NULL) - return NULL; + const PyLongLayout *layout = PyLong_GetNativeLayout(); + Py_ssize_t marshal_ratio = layout->bits_per_digit/PyLong_MARSHAL_SHIFT; - 
_PyLong_SetSignAndDigitCount(ob, n < 0 ? -1 : 1, size); + /* must be a multiple of PyLong_MARSHAL_SHIFT */ + assert(layout->bits_per_digit % PyLong_MARSHAL_SHIFT == 0); + assert(layout->bits_per_digit >= PyLong_MARSHAL_SHIFT); - for (i = 0; i < size-1; i++) { - d = 0; - for (j=0; j < PyLong_MARSHAL_RATIO; j++) { - md = r_short(p); - if (md < 0 || md > PyLong_MARSHAL_BASE) - goto bad_digit; - d += (digit)md << j*PyLong_MARSHAL_SHIFT; - } - ob->long_value.ob_digit[i] = d; + /* other assumptions on PyLongObject internals */ + assert(layout->bits_per_digit <= 32); + assert(layout->digits_order == -1); + assert(layout->digit_endianness == (PY_LITTLE_ENDIAN ? -1 : 1)); + assert(layout->digit_size == 2 || layout->digit_size == 4); + + Py_ssize_t size = 1 + (Py_ABS(n) - 1) / marshal_ratio; + + assert(size >= 1); + + int shorts_in_top_digit = 1 + (Py_ABS(n) - 1) % marshal_ratio; + void *digits; + PyLongWriter *writer = PyLongWriter_Create(n < 0, size, &digits); + + if (writer == NULL) { + return NULL; } - d = 0; - for (j=0; j < shorts_in_top_digit; j++) { - md = r_short(p); - if (md < 0 || md > PyLong_MARSHAL_BASE) - goto bad_digit; - /* topmost marshal digit should be nonzero */ - if (md == 0 && j == shorts_in_top_digit - 1) { - Py_DECREF(ob); - PyErr_SetString(PyExc_ValueError, - "bad marshal data (unnormalized long data)"); - return NULL; - } - d += (digit)md << j*PyLong_MARSHAL_SHIFT; + int ret; + + if (layout->digit_size == 4) { + ret = _w_digits32(digits, size, marshal_ratio, shorts_in_top_digit, p); } - assert(!PyErr_Occurred()); - /* top digit should be nonzero, else the resulting PyLong won't be - normalized */ - ob->long_value.ob_digit[size-1] = d; - return (PyObject *)ob; - bad_digit: - Py_DECREF(ob); - if (!PyErr_Occurred()) { - PyErr_SetString(PyExc_ValueError, - "bad marshal data (digit out of range in long)"); + else { + ret = _w_digits16(digits, size, marshal_ratio, shorts_in_top_digit, p); + } + if (ret < 0) { + PyLongWriter_Discard(writer); + return NULL; } - return NULL; + return PyLongWriter_Finish(writer); } static double diff --git a/Python/modsupport.c b/Python/modsupport.c index 0fb7783345c78e..517dc971f88c87 100644 --- a/Python/modsupport.c +++ b/Python/modsupport.c @@ -648,3 +648,20 @@ PyModule_AddType(PyObject *module, PyTypeObject *type) return PyModule_AddObjectRef(module, name, (PyObject *)type); } + + +/* Exported functions for version helper macros */ + +#undef Py_PACK_FULL_VERSION +uint32_t +Py_PACK_FULL_VERSION(int x, int y, int z, int level, int serial) +{ + return _Py_PACK_FULL_VERSION(x, y, z, level, serial); +} + +#undef Py_PACK_VERSION +uint32_t +Py_PACK_VERSION(int x, int y) +{ + return Py_PACK_FULL_VERSION(x, y, 0, 0, 0); +} diff --git a/Python/opcode_targets.h b/Python/opcode_targets.h index 7f3fb9c9a63dd1..09a834bb38fa67 100644 --- a/Python/opcode_targets.h +++ b/Python/opcode_targets.h @@ -3,6 +3,7 @@ static void *opcode_targets[256] = { &&TARGET_BINARY_SLICE, &&TARGET_BINARY_SUBSCR, &&TARGET_BINARY_OP_INPLACE_ADD_UNICODE, + &&TARGET_CALL_FUNCTION_EX, &&TARGET_CHECK_EG_MATCH, &&TARGET_CHECK_EXC_MATCH, &&TARGET_CLEANUP_THROW, @@ -15,8 +16,8 @@ static void *opcode_targets[256] = { &&TARGET_FORMAT_WITH_SPEC, &&TARGET_GET_AITER, &&TARGET_GET_ANEXT, - &&TARGET_GET_ITER, &&TARGET_RESERVED, + &&TARGET_GET_ITER, &&TARGET_GET_LEN, &&TARGET_GET_YIELD_FROM_ITER, &&TARGET_INTERPRETER_EXIT, @@ -29,6 +30,7 @@ static void *opcode_targets[256] = { &&TARGET_NOP, &&TARGET_NOT_TAKEN, &&TARGET_POP_EXCEPT, + &&TARGET_POP_ITER, &&TARGET_POP_TOP, &&TARGET_PUSH_EXC_INFO, 
&&TARGET_PUSH_NULL, @@ -50,7 +52,6 @@ static void *opcode_targets[256] = { &&TARGET_BUILD_STRING, &&TARGET_BUILD_TUPLE, &&TARGET_CALL, - &&TARGET_CALL_FUNCTION_EX, &&TARGET_CALL_INTRINSIC_1, &&TARGET_CALL_INTRINSIC_2, &&TARGET_CALL_KW, @@ -147,11 +148,11 @@ static void *opcode_targets[256] = { &&_unknown_opcode, &&_unknown_opcode, &&_unknown_opcode, - &&_unknown_opcode, &&TARGET_RESUME, &&TARGET_BINARY_OP_ADD_FLOAT, &&TARGET_BINARY_OP_ADD_INT, &&TARGET_BINARY_OP_ADD_UNICODE, + &&TARGET_BINARY_OP_EXTEND, &&TARGET_BINARY_OP_MULTIPLY_FLOAT, &&TARGET_BINARY_OP_MULTIPLY_INT, &&TARGET_BINARY_OP_SUBTRACT_FLOAT, @@ -193,6 +194,8 @@ static void *opcode_targets[256] = { &&TARGET_FOR_ITER_LIST, &&TARGET_FOR_ITER_RANGE, &&TARGET_FOR_ITER_TUPLE, + &&TARGET_JUMP_BACKWARD_JIT, + &&TARGET_JUMP_BACKWARD_NO_JIT, &&TARGET_LOAD_ATTR_CLASS, &&TARGET_LOAD_ATTR_CLASS_WITH_METACLASS_CHECK, &&TARGET_LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN, @@ -207,6 +210,7 @@ static void *opcode_targets[256] = { &&TARGET_LOAD_ATTR_SLOT, &&TARGET_LOAD_ATTR_WITH_HINT, &&TARGET_LOAD_CONST_IMMORTAL, + &&TARGET_LOAD_CONST_MORTAL, &&TARGET_LOAD_GLOBAL_BUILTIN, &&TARGET_LOAD_GLOBAL_MODULE, &&TARGET_LOAD_SUPER_ATTR_ATTR, @@ -230,12 +234,8 @@ static void *opcode_targets[256] = { &&_unknown_opcode, &&_unknown_opcode, &&_unknown_opcode, - &&_unknown_opcode, - &&_unknown_opcode, - &&_unknown_opcode, - &&_unknown_opcode, - &&_unknown_opcode, &&TARGET_INSTRUMENTED_END_FOR, + &&TARGET_INSTRUMENTED_POP_ITER, &&TARGET_INSTRUMENTED_END_SEND, &&TARGET_INSTRUMENTED_LOAD_SUPER_ATTR, &&TARGET_INSTRUMENTED_FOR_ITER, diff --git a/Python/optimizer.c b/Python/optimizer.c index 6a4d20fad76c15..d71abd3224240b 100644 --- a/Python/optimizer.c +++ b/Python/optimizer.c @@ -91,72 +91,13 @@ insert_executor(PyCodeObject *code, _Py_CODEUNIT *instr, int index, _PyExecutorO instr->op.arg = index; } - -static int -never_optimize( - _PyOptimizerObject* self, - _PyInterpreterFrame *frame, - _Py_CODEUNIT *instr, - _PyExecutorObject **exec, - int Py_UNUSED(stack_entries), - bool Py_UNUSED(progress_needed)) -{ - // This may be called if the optimizer is reset - return 0; -} - -PyTypeObject _PyDefaultOptimizer_Type = { - PyVarObject_HEAD_INIT(&PyType_Type, 0) - .tp_name = "noop_optimizer", - .tp_basicsize = sizeof(_PyOptimizerObject), - .tp_itemsize = 0, - .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_DISALLOW_INSTANTIATION, -}; - -static _PyOptimizerObject _PyOptimizer_Default = { - PyObject_HEAD_INIT(&_PyDefaultOptimizer_Type) - .optimize = never_optimize, -}; - -_PyOptimizerObject * -_Py_GetOptimizer(void) -{ - PyInterpreterState *interp = _PyInterpreterState_GET(); - if (interp->optimizer == &_PyOptimizer_Default) { - return NULL; - } - Py_INCREF(interp->optimizer); - return interp->optimizer; -} - static _PyExecutorObject * make_executor_from_uops(_PyUOpInstruction *buffer, int length, const _PyBloomFilter *dependencies); -static const _PyBloomFilter EMPTY_FILTER = { 0 }; - -_PyOptimizerObject * -_Py_SetOptimizer(PyInterpreterState *interp, _PyOptimizerObject *optimizer) -{ - if (optimizer == NULL) { - optimizer = &_PyOptimizer_Default; - } - _PyOptimizerObject *old = interp->optimizer; - if (old == NULL) { - old = &_PyOptimizer_Default; - } - Py_INCREF(optimizer); - interp->optimizer = optimizer; - return old; -} - -int -_Py_SetTier2Optimizer(_PyOptimizerObject *optimizer) -{ - PyInterpreterState *interp = _PyInterpreterState_GET(); - _PyOptimizerObject *old = _Py_SetOptimizer(interp, optimizer); - Py_XDECREF(old); - return old == NULL ? 
-1 : 0; -} +static int +uop_optimize(_PyInterpreterFrame *frame, _Py_CODEUNIT *instr, + _PyExecutorObject **exec_ptr, int curr_stackentries, + bool progress_needed); /* Returns 1 if optimized, 0 if not optimized, and -1 for an error. * If optimized, *executor_ptr contains a new reference to the executor @@ -166,6 +107,7 @@ _PyOptimizer_Optimize( _PyInterpreterFrame *frame, _Py_CODEUNIT *start, _PyStackRef *stack_pointer, _PyExecutorObject **executor_ptr, int chain_depth) { + assert(_PyInterpreterState_GET()->jit); // The first executor in a chain and the MAX_CHAIN_DEPTH'th executor *must* // make progress in order to avoid infinite loops or excessively-long // side-exit chains. We can only insert the executor into the bytecode if @@ -174,12 +116,10 @@ _PyOptimizer_Optimize( bool progress_needed = chain_depth == 0; PyCodeObject *code = _PyFrame_GetCode(frame); assert(PyCode_Check(code)); - PyInterpreterState *interp = _PyInterpreterState_GET(); if (progress_needed && !has_space_for_executor(code, start)) { return 0; } - _PyOptimizerObject *opt = interp->optimizer; - int err = opt->optimize(opt, frame, start, executor_ptr, (int)(stack_pointer - _PyFrame_Stackbase(frame)), progress_needed); + int err = uop_optimize(frame, start, executor_ptr, (int)(stack_pointer - _PyFrame_Stackbase(frame)), progress_needed); if (err <= 0) { return err; } @@ -251,13 +191,6 @@ get_oparg(PyObject *self, PyObject *Py_UNUSED(ignored)) return PyLong_FromUnsignedLong(((_PyExecutorObject *)self)->vm_data.oparg); } -static PyMethodDef executor_methods[] = { - { "is_valid", is_valid, METH_NOARGS, NULL }, - { "get_opcode", get_opcode, METH_NOARGS, NULL }, - { "get_oparg", get_oparg, METH_NOARGS, NULL }, - { NULL, NULL }, -}; - ///////////////////// Experimental UOp Optimizer ///////////////////// static int executor_clear(_PyExecutorObject *executor); @@ -622,8 +555,14 @@ translate_bytecode_to_trace( goto done; } assert(opcode != ENTER_EXECUTOR && opcode != EXTENDED_ARG); - RESERVE_RAW(2, "_CHECK_VALIDITY_AND_SET_IP"); - ADD_TO_TRACE(_CHECK_VALIDITY_AND_SET_IP, 0, (uintptr_t)instr, target); + if (OPCODE_HAS_NO_SAVE_IP(opcode)) { + RESERVE_RAW(2, "_CHECK_VALIDITY"); + ADD_TO_TRACE(_CHECK_VALIDITY, 0, 0, target); + } + else { + RESERVE_RAW(2, "_CHECK_VALIDITY_AND_SET_IP"); + ADD_TO_TRACE(_CHECK_VALIDITY_AND_SET_IP, 0, (uintptr_t)instr, target); + } /* Special case the first instruction, * so that we can guarantee forward progress */ @@ -687,6 +626,7 @@ translate_bytecode_to_trace( } case JUMP_BACKWARD: + case JUMP_BACKWARD_JIT: ADD_TO_TRACE(_CHECK_PERIODIC, 0, 0, target); _Py_FALLTHROUGH; case JUMP_BACKWARD_NO_INTERRUPT: @@ -771,7 +711,7 @@ translate_bytecode_to_trace( uint32_t next_inst = target + 1 + INLINE_CACHE_ENTRIES_FOR_ITER + (oparg > 255); uint32_t jump_target = next_inst + oparg; assert(_Py_GetBaseCodeUnit(code, jump_target).op.code == END_FOR); - assert(_Py_GetBaseCodeUnit(code, jump_target+1).op.code == POP_TOP); + assert(_Py_GetBaseCodeUnit(code, jump_target+1).op.code == POP_ITER); } #endif break; @@ -1049,7 +989,6 @@ prepare_for_execution(_PyUOpInstruction *buffer, int length) current_error = next_spare; current_error_target = target; make_exit(&buffer[next_spare], _ERROR_POP_N, 0); - buffer[next_spare].oparg = popped; buffer[next_spare].operand0 = target; next_spare++; } @@ -1244,7 +1183,6 @@ int effective_trace_length(_PyUOpInstruction *buffer, int length) static int uop_optimize( - _PyOptimizerObject *self, _PyInterpreterFrame *frame, _Py_CODEUNIT *instr, _PyExecutorObject **exec_ptr, @@ -1279,15 
+1217,16 @@ uop_optimize( int oparg = buffer[pc].oparg; if (_PyUop_Flags[opcode] & HAS_OPARG_AND_1_FLAG) { buffer[pc].opcode = opcode + 1 + (oparg & 1); + assert(strncmp(_PyOpcode_uop_name[buffer[pc].opcode], _PyOpcode_uop_name[opcode], strlen(_PyOpcode_uop_name[opcode])) == 0); } else if (oparg < _PyUop_Replication[opcode]) { buffer[pc].opcode = opcode + oparg + 1; + assert(strncmp(_PyOpcode_uop_name[buffer[pc].opcode], _PyOpcode_uop_name[opcode], strlen(_PyOpcode_uop_name[opcode])) == 0); } else if (is_terminator(&buffer[pc])) { break; } assert(_PyOpcode_uop_name[buffer[pc].opcode]); - assert(strncmp(_PyOpcode_uop_name[buffer[pc].opcode], _PyOpcode_uop_name[opcode], strlen(_PyOpcode_uop_name[opcode])) == 0); } OPT_HIST(effective_trace_length(buffer, length), optimized_trace_length_hist); length = prepare_for_execution(buffer, length); @@ -1301,121 +1240,6 @@ uop_optimize( return 1; } -static void -uop_opt_dealloc(PyObject *self) { - PyObject_Free(self); -} - -PyTypeObject _PyUOpOptimizer_Type = { - PyVarObject_HEAD_INIT(&PyType_Type, 0) - .tp_name = "uop_optimizer", - .tp_basicsize = sizeof(_PyOptimizerObject), - .tp_itemsize = 0, - .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_DISALLOW_INSTANTIATION, - .tp_dealloc = uop_opt_dealloc, -}; - -PyObject * -_PyOptimizer_NewUOpOptimizer(void) -{ - _PyOptimizerObject *opt = PyObject_New(_PyOptimizerObject, &_PyUOpOptimizer_Type); - if (opt == NULL) { - return NULL; - } - opt->optimize = uop_optimize; - return (PyObject *)opt; -} - -static void -counter_dealloc(_PyExecutorObject *self) { - /* The optimizer is the operand of the second uop. */ - PyObject *opt = (PyObject *)self->trace[1].operand0; - Py_DECREF(opt); - uop_dealloc(self); -} - -PyTypeObject _PyCounterExecutor_Type = { - PyVarObject_HEAD_INIT(&PyType_Type, 0) - .tp_name = "counting_executor", - .tp_basicsize = offsetof(_PyExecutorObject, exits), - .tp_itemsize = 1, - .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_DISALLOW_INSTANTIATION | Py_TPFLAGS_HAVE_GC, - .tp_dealloc = (destructor)counter_dealloc, - .tp_methods = executor_methods, - .tp_traverse = executor_traverse, - .tp_clear = (inquiry)executor_clear, -}; - -static int -counter_optimize( - _PyOptimizerObject* self, - _PyInterpreterFrame *frame, - _Py_CODEUNIT *instr, - _PyExecutorObject **exec_ptr, - int Py_UNUSED(curr_stackentries), - bool Py_UNUSED(progress_needed) -) -{ - PyCodeObject *code = _PyFrame_GetCode(frame); - int oparg = instr->op.arg; - while (instr->op.code == EXTENDED_ARG) { - instr++; - oparg = (oparg << 8) | instr->op.arg; - } - if (instr->op.code != JUMP_BACKWARD) { - /* Counter optimizer can only handle backward edges */ - return 0; - } - _Py_CODEUNIT *target = instr + 1 + _PyOpcode_Caches[JUMP_BACKWARD] - oparg; - _PyUOpInstruction buffer[4] = { - { .opcode = _START_EXECUTOR, .jump_target = 3, .format=UOP_FORMAT_JUMP }, - { .opcode = _LOAD_CONST_INLINE, .operand0 = (uintptr_t)self }, - { .opcode = _INTERNAL_INCREMENT_OPT_COUNTER }, - { .opcode = _EXIT_TRACE, .target = (uint32_t)(target - _PyCode_CODE(code)), .format=UOP_FORMAT_TARGET } - }; - _PyExecutorObject *executor = make_executor_from_uops(buffer, 4, &EMPTY_FILTER); - if (executor == NULL) { - return -1; - } - Py_INCREF(self); - Py_SET_TYPE(executor, &_PyCounterExecutor_Type); - *exec_ptr = executor; - return 1; -} - -static PyObject * -counter_get_counter(PyObject *self, PyObject *args) -{ - return PyLong_FromLongLong(((_PyCounterOptimizerObject *)self)->count); -} - -static PyMethodDef counter_optimizer_methods[] = { - { "get_count", 
counter_get_counter, METH_NOARGS, NULL }, - { NULL, NULL }, -}; - -PyTypeObject _PyCounterOptimizer_Type = { - PyVarObject_HEAD_INIT(&PyType_Type, 0) - .tp_name = "Counter optimizer", - .tp_basicsize = sizeof(_PyCounterOptimizerObject), - .tp_itemsize = 0, - .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_DISALLOW_INSTANTIATION, - .tp_methods = counter_optimizer_methods, - .tp_dealloc = (destructor)PyObject_Free, -}; - -PyObject * -_PyOptimizer_NewCounter(void) -{ - _PyCounterOptimizerObject *opt = (_PyCounterOptimizerObject *)_PyObject_New(&_PyCounterOptimizer_Type); - if (opt == NULL) { - return NULL; - } - opt->base.optimize = counter_optimize; - opt->count = 0; - return (PyObject *)opt; -} - /***************************************** * Executor management diff --git a/Python/optimizer_analysis.c b/Python/optimizer_analysis.c index 0ef15c630e91db..5dd7725f398cd1 100644 --- a/Python/optimizer_analysis.c +++ b/Python/optimizer_analysis.c @@ -109,10 +109,14 @@ convert_global_to_const(_PyUOpInstruction *inst, PyObject *obj) return NULL; } if (_Py_IsImmortal(res)) { - inst->opcode = (inst->oparg & 1) ? _LOAD_CONST_INLINE_BORROW_WITH_NULL : _LOAD_CONST_INLINE_BORROW; + inst->opcode = _LOAD_CONST_INLINE_BORROW; } else { - inst->opcode = (inst->oparg & 1) ? _LOAD_CONST_INLINE_WITH_NULL : _LOAD_CONST_INLINE; + inst->opcode = _LOAD_CONST_INLINE; + } + if (inst->oparg & 1) { + assert(inst[1].opcode == _PUSH_NULL_CONDITIONAL); + assert(inst[1].oparg & 1); } inst->operand0 = (uint64_t)res; return res; @@ -368,13 +372,17 @@ remove_globals(_PyInterpreterFrame *frame, _PyUOpInstruction *buffer, #define sym_truthiness _Py_uop_sym_truthiness #define frame_new _Py_uop_frame_new #define frame_pop _Py_uop_frame_pop +#define sym_new_tuple _Py_uop_sym_new_tuple +#define sym_tuple_getitem _Py_uop_sym_tuple_getitem +#define sym_tuple_length _Py_uop_sym_tuple_length +#define sym_is_immortal _Py_uop_sym_is_immortal static int optimize_to_bool( _PyUOpInstruction *this_instr, - _Py_UOpsContext *ctx, - _Py_UopsSymbol *value, - _Py_UopsSymbol **result_ptr) + JitOptContext *ctx, + JitOptSymbol *value, + JitOptSymbol **result_ptr) { if (sym_matches_type(value, &PyBool_Type)) { REPLACE_OP(this_instr, _NOP, 0, 0); @@ -460,8 +468,8 @@ optimize_uops( ) { - _Py_UOpsContext context; - _Py_UOpsContext *ctx = &context; + JitOptContext context; + JitOptContext *ctx = &context; uint32_t opcode = UINT16_MAX; int curr_space = 0; int max_space = 0; @@ -486,7 +494,7 @@ optimize_uops( int oparg = this_instr->oparg; opcode = this_instr->opcode; - _Py_UopsSymbol **stack_pointer = ctx->frame->stack_pointer; + JitOptSymbol **stack_pointer = ctx->frame->stack_pointer; #ifdef Py_DEBUG if (get_lltrace() >= 3) { diff --git a/Python/optimizer_bytecodes.c b/Python/optimizer_bytecodes.c index a14d119b7a1dec..91573e82841cc9 100644 --- a/Python/optimizer_bytecodes.c +++ b/Python/optimizer_bytecodes.c @@ -6,8 +6,6 @@ #define op(name, ...) 
/* NAME is ignored */ -typedef struct _Py_UopsSymbol _Py_UopsSymbol; -typedef struct _Py_UOpsContext _Py_UOpsContext; typedef struct _Py_UOpsAbstractFrame _Py_UOpsAbstractFrame; /* Shortened forms for convenience */ @@ -32,13 +30,17 @@ typedef struct _Py_UOpsAbstractFrame _Py_UOpsAbstractFrame; #define sym_is_bottom _Py_uop_sym_is_bottom #define frame_new _Py_uop_frame_new #define frame_pop _Py_uop_frame_pop +#define sym_new_tuple _Py_uop_sym_new_tuple +#define sym_tuple_getitem _Py_uop_sym_tuple_getitem +#define sym_tuple_length _Py_uop_sym_tuple_length +#define sym_is_immortal _Py_uop_sym_is_immortal extern int optimize_to_bool( _PyUOpInstruction *this_instr, - _Py_UOpsContext *ctx, - _Py_UopsSymbol *value, - _Py_UopsSymbol **result_ptr); + JitOptContext *ctx, + JitOptSymbol *value, + JitOptSymbol **result_ptr); extern void eliminate_pop_guard(_PyUOpInstruction *this_instr, bool exit); @@ -50,17 +52,17 @@ dummy_func(void) { PyCodeObject *co; int oparg; - _Py_UopsSymbol *flag; - _Py_UopsSymbol *left; - _Py_UopsSymbol *right; - _Py_UopsSymbol *value; - _Py_UopsSymbol *res; - _Py_UopsSymbol *iter; - _Py_UopsSymbol *top; - _Py_UopsSymbol *bottom; + JitOptSymbol *flag; + JitOptSymbol *left; + JitOptSymbol *right; + JitOptSymbol *value; + JitOptSymbol *res; + JitOptSymbol *iter; + JitOptSymbol *top; + JitOptSymbol *bottom; _Py_UOpsAbstractFrame *frame; _Py_UOpsAbstractFrame *new_frame; - _Py_UOpsContext *ctx; + JitOptContext *ctx; _PyUOpInstruction *this_instr; _PyBloomFilter *dependencies; int modified; @@ -85,7 +87,7 @@ dummy_func(void) { op(_LOAD_FAST_AND_CLEAR, (-- value)) { value = GETLOCAL(oparg); - _Py_UopsSymbol *temp = sym_new_null(ctx); + JitOptSymbol *temp = sym_new_null(ctx); GETLOCAL(oparg) = temp; } @@ -167,23 +169,56 @@ dummy_func(void) { } op(_BINARY_OP, (left, right -- res)) { - PyTypeObject *ltype = sym_get_type(left); - PyTypeObject *rtype = sym_get_type(right); - if (ltype != NULL && (ltype == &PyLong_Type || ltype == &PyFloat_Type) && - rtype != NULL && (rtype == &PyLong_Type || rtype == &PyFloat_Type)) - { - if (oparg != NB_TRUE_DIVIDE && oparg != NB_INPLACE_TRUE_DIVIDE && - ltype == &PyLong_Type && rtype == &PyLong_Type) { - /* If both inputs are ints and the op is not division the result is an int */ - res = sym_new_type(ctx, &PyLong_Type); + bool lhs_int = sym_matches_type(left, &PyLong_Type); + bool rhs_int = sym_matches_type(right, &PyLong_Type); + bool lhs_float = sym_matches_type(left, &PyFloat_Type); + bool rhs_float = sym_matches_type(right, &PyFloat_Type); + if (!((lhs_int || lhs_float) && (rhs_int || rhs_float))) { + // There's something other than an int or float involved: + res = sym_new_unknown(ctx); + } + else if (oparg == NB_POWER || oparg == NB_INPLACE_POWER) { + // This one's fun... the *type* of the result depends on the + // *values* being exponentiated. However, exponents with one + // constant part are reasonably common, so it's probably worth + // trying to infer some simple cases: + // - A: 1 ** 1 -> 1 (int ** int -> int) + // - B: 1 ** -1 -> 1.0 (int ** int -> float) + // - C: 1.0 ** 1 -> 1.0 (float ** int -> float) + // - D: 1 ** 1.0 -> 1.0 (int ** float -> float) + // - E: -1 ** 0.5 ~> 1j (int ** float -> complex) + // - F: 1.0 ** 1.0 -> 1.0 (float ** float -> float) + // - G: -1.0 ** 0.5 ~> 1j (float ** float -> complex) + if (rhs_float) { + // Case D, E, F, or G... 
can't know without the sign of the LHS + // or whether the RHS is whole, which isn't worth the effort: + res = sym_new_unknown(ctx); } - else { - /* For any other op combining ints/floats the result is a float */ + else if (lhs_float) { + // Case C: + res = sym_new_type(ctx, &PyFloat_Type); + } + else if (!sym_is_const(right)) { + // Case A or B... can't know without the sign of the RHS: + res = sym_new_unknown(ctx); + } + else if (_PyLong_IsNegative((PyLongObject *)sym_get_const(right))) { + // Case B: res = sym_new_type(ctx, &PyFloat_Type); } + else { + // Case A: + res = sym_new_type(ctx, &PyLong_Type); + } + } + else if (oparg == NB_TRUE_DIVIDE || oparg == NB_INPLACE_TRUE_DIVIDE) { + res = sym_new_type(ctx, &PyFloat_Type); + } + else if (lhs_int && rhs_int) { + res = sym_new_type(ctx, &PyLong_Type); } else { - res = sym_new_unknown(ctx); + res = sym_new_type(ctx, &PyFloat_Type); } } @@ -332,7 +367,7 @@ dummy_func(void) { } op(_BINARY_OP_INPLACE_ADD_UNICODE, (left, right -- )) { - _Py_UopsSymbol *res; + JitOptSymbol *res; if (sym_is_const(left) && sym_is_const(right) && sym_matches_type(left, &PyUnicode_Type) && sym_matches_type(right, &PyUnicode_Type)) { PyObject *temp = PyUnicode_Concat(sym_get_const(left), sym_get_const(right)); @@ -446,6 +481,13 @@ dummy_func(void) { value = sym_new_const(ctx, val); } + op(_LOAD_CONST_MORTAL, (-- value)) { + PyObject *val = PyTuple_GET_ITEM(co->co_consts, this_instr->oparg); + int opcode = _Py_IsImmortal(val) ? _LOAD_CONST_INLINE_BORROW : _LOAD_CONST_INLINE; + REPLACE_OP(this_instr, opcode, 0, (uintptr_t)val); + value = sym_new_const(ctx, val); + } + op(_LOAD_CONST_IMMORTAL, (-- value)) { PyObject *val = PyTuple_GET_ITEM(co->co_consts, this_instr->oparg); REPLACE_OP(this_instr, _LOAD_CONST_INLINE_BORROW, 0, (uintptr_t)val); @@ -465,30 +507,20 @@ dummy_func(void) { value = sym_new_const(ctx, ptr); } - op(_LOAD_CONST_INLINE_WITH_NULL, (ptr/4 -- value, null)) { - value = sym_new_const(ctx, ptr); - null = sym_new_null(ctx); - } - - op(_LOAD_CONST_INLINE_BORROW_WITH_NULL, (ptr/4 -- value, null)) { - value = sym_new_const(ctx, ptr); - null = sym_new_null(ctx); - } - op(_COPY, (bottom, unused[oparg-1] -- bottom, unused[oparg-1], top)) { assert(oparg > 0); top = bottom; } - op(_SWAP, (bottom_in, unused[oparg-2], top_in -- - top_out, unused[oparg-2], bottom_out)) { - bottom_out = bottom_in; - top_out = top_in; + op(_SWAP, (bottom[1], unused[oparg-2], top[1] -- bottom[1], unused[oparg-2], top[1])) { + JitOptSymbol *temp = bottom[0]; + bottom[0] = top[0]; + top[0] = temp; + assert(oparg >= 2); } - op(_LOAD_ATTR_INSTANCE_VALUE, (offset/1, owner -- attr, null if (oparg & 1))) { + op(_LOAD_ATTR_INSTANCE_VALUE, (offset/1, owner -- attr)) { attr = sym_new_not_null(ctx); - null = sym_new_null(ctx); (void)offset; (void)owner; } @@ -511,15 +543,22 @@ dummy_func(void) { } } - op(_LOAD_ATTR, (owner -- attr, self_or_null if (oparg & 1))) { + op (_PUSH_NULL_CONDITIONAL, ( -- null if (oparg & 1))) { + int opcode = (oparg & 1) ? 
_PUSH_NULL : _NOP; + REPLACE_OP(this_instr, opcode, 0, 0); + null = sym_new_null(ctx); + } + + op(_LOAD_ATTR, (owner -- attr, self_or_null[oparg&1])) { (void)owner; attr = sym_new_not_null(ctx); - self_or_null = sym_new_unknown(ctx); + if (oparg &1) { + self_or_null[0] = sym_new_unknown(ctx); + } } - op(_LOAD_ATTR_MODULE_FROM_KEYS, (index/1, owner, mod_keys -- attr, null if (oparg & 1))) { + op(_LOAD_ATTR_MODULE_FROM_KEYS, (index/1, owner, mod_keys -- attr)) { (void)index; - null = sym_new_null(ctx); attr = NULL; if (this_instr[-1].opcode == _NOP) { // Preceding _CHECK_ATTR_MODULE_PUSH_KEYS was removed: mod is const and dict is watched. @@ -542,40 +581,43 @@ dummy_func(void) { } } - op(_LOAD_ATTR_WITH_HINT, (hint/1, owner -- attr, null if (oparg & 1))) { + op(_CHECK_ATTR_WITH_HINT, (owner -- owner, dict)) { + dict = sym_new_not_null(ctx); + (void)owner; + } + + op(_LOAD_ATTR_WITH_HINT, (hint/1, owner, dict -- attr)) { attr = sym_new_not_null(ctx); - null = sym_new_null(ctx); (void)hint; (void)owner; + (void)dict; } - op(_LOAD_ATTR_SLOT, (index/1, owner -- attr, null if (oparg & 1))) { + op(_LOAD_ATTR_SLOT, (index/1, owner -- attr)) { attr = sym_new_not_null(ctx); - null = sym_new_null(ctx); (void)index; (void)owner; } - op(_LOAD_ATTR_CLASS, (descr/4, owner -- attr, null if (oparg & 1))) { + op(_LOAD_ATTR_CLASS, (descr/4, owner -- attr)) { attr = sym_new_not_null(ctx); - null = sym_new_null(ctx); (void)descr; (void)owner; } - op(_LOAD_ATTR_METHOD_WITH_VALUES, (descr/4, owner -- attr, self if (1))) { + op(_LOAD_ATTR_METHOD_WITH_VALUES, (descr/4, owner -- attr, self)) { (void)descr; attr = sym_new_not_null(ctx); self = owner; } - op(_LOAD_ATTR_METHOD_NO_DICT, (descr/4, owner -- attr, self if (1))) { + op(_LOAD_ATTR_METHOD_NO_DICT, (descr/4, owner -- attr, self)) { (void)descr; attr = sym_new_not_null(ctx); self = owner; } - op(_LOAD_ATTR_METHOD_LAZY_DICT, (descr/4, owner -- attr, self if (1))) { + op(_LOAD_ATTR_METHOD_LAZY_DICT, (descr/4, owner -- attr, self)) { (void)descr; attr = sym_new_not_null(ctx); self = owner; @@ -588,10 +630,10 @@ dummy_func(void) { ctx->done = true; } - op(_INIT_CALL_BOUND_METHOD_EXACT_ARGS, (callable, unused, unused[oparg] -- func, self, unused[oparg])) { + op(_INIT_CALL_BOUND_METHOD_EXACT_ARGS, (callable[1], self_or_null[1], unused[oparg] -- callable[1], self_or_null[1], unused[oparg])) { (void)callable; - func = sym_new_not_null(ctx); - self = sym_new_not_null(ctx); + callable[0] = sym_new_not_null(ctx); + self_or_null[0] = sym_new_not_null(ctx); } op(_CHECK_FUNCTION_VERSION, (func_version/2, callable, self_or_null, unused[oparg] -- callable, self_or_null, unused[oparg])) { @@ -903,6 +945,22 @@ dummy_func(void) { res = sym_new_const(ctx, Py_True); } + op(_BUILD_TUPLE, (values[oparg] -- tup)) { + tup = sym_new_tuple(ctx, oparg, values); + } + + op(_UNPACK_SEQUENCE_TWO_TUPLE, (seq -- val1, val0)) { + val0 = sym_tuple_getitem(ctx, seq, 0); + val1 = sym_tuple_getitem(ctx, seq, 1); + } + + op(_UNPACK_SEQUENCE_TUPLE, (seq -- values[oparg])) { + for (int i = 0; i < oparg; i++) { + values[i] = sym_tuple_getitem(ctx, seq, i); + } + } + + // END BYTECODES // } diff --git a/Python/optimizer_cases.c.h b/Python/optimizer_cases.c.h index 0fcf5e18ed5808..144c7c503b2661 100644 --- a/Python/optimizer_cases.c.h +++ b/Python/optimizer_cases.c.h @@ -26,7 +26,7 @@ /* _MONITOR_RESUME is not a viable micro-op for tier 2 */ case _LOAD_FAST_CHECK: { - _Py_UopsSymbol *value; + JitOptSymbol *value; value = GETLOCAL(oparg); // We guarantee this will error - just bail and don't optimize it. 
if (sym_is_null(value)) { @@ -39,7 +39,7 @@ } case _LOAD_FAST: { - _Py_UopsSymbol *value; + JitOptSymbol *value; value = GETLOCAL(oparg); stack_pointer[0] = value; stack_pointer += 1; @@ -48,9 +48,9 @@ } case _LOAD_FAST_AND_CLEAR: { - _Py_UopsSymbol *value; + JitOptSymbol *value; value = GETLOCAL(oparg); - _Py_UopsSymbol *temp = sym_new_null(ctx); + JitOptSymbol *temp = sym_new_null(ctx); GETLOCAL(oparg) = temp; stack_pointer[0] = value; stack_pointer += 1; @@ -58,8 +58,10 @@ break; } - case _LOAD_CONST: { - _Py_UopsSymbol *value; + /* _LOAD_CONST is not a viable micro-op for tier 2 */ + + case _LOAD_CONST_MORTAL: { + JitOptSymbol *value; PyObject *val = PyTuple_GET_ITEM(co->co_consts, this_instr->oparg); int opcode = _Py_IsImmortal(val) ? _LOAD_CONST_INLINE_BORROW : _LOAD_CONST_INLINE; REPLACE_OP(this_instr, opcode, 0, (uintptr_t)val); @@ -71,7 +73,7 @@ } case _LOAD_CONST_IMMORTAL: { - _Py_UopsSymbol *value; + JitOptSymbol *value; PyObject *val = PyTuple_GET_ITEM(co->co_consts, this_instr->oparg); REPLACE_OP(this_instr, _LOAD_CONST_INLINE_BORROW, 0, (uintptr_t)val); value = sym_new_const(ctx, val); @@ -82,7 +84,7 @@ } case _LOAD_SMALL_INT: { - _Py_UopsSymbol *value; + JitOptSymbol *value; PyObject *val = PyLong_FromLong(this_instr->oparg); value = sym_new_const(ctx, val); stack_pointer[0] = value; @@ -92,7 +94,7 @@ } case _STORE_FAST: { - _Py_UopsSymbol *value; + JitOptSymbol *value; value = stack_pointer[-1]; GETLOCAL(oparg) = value; stack_pointer += -1; @@ -107,7 +109,7 @@ } case _PUSH_NULL: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_null(ctx); stack_pointer[0] = res; stack_pointer += 1; @@ -115,8 +117,14 @@ break; } + case _END_FOR: { + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + break; + } + case _END_SEND: { - _Py_UopsSymbol *val; + JitOptSymbol *val; val = sym_new_not_null(ctx); stack_pointer[-2] = val; stack_pointer += -1; @@ -125,22 +133,22 @@ } case _UNARY_NEGATIVE: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-1] = res; break; } case _UNARY_NOT: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-1] = res; break; } case _TO_BOOL: { - _Py_UopsSymbol *value; - _Py_UopsSymbol *res; + JitOptSymbol *value; + JitOptSymbol *res; value = stack_pointer[-1]; if (!optimize_to_bool(this_instr, ctx, value, &res)) { res = sym_new_type(ctx, &PyBool_Type); @@ -150,8 +158,8 @@ } case _TO_BOOL_BOOL: { - _Py_UopsSymbol *value; - _Py_UopsSymbol *res; + JitOptSymbol *value; + JitOptSymbol *res; value = stack_pointer[-1]; if (!optimize_to_bool(this_instr, ctx, value, &res)) { sym_set_type(value, &PyBool_Type); @@ -162,8 +170,8 @@ } case _TO_BOOL_INT: { - _Py_UopsSymbol *value; - _Py_UopsSymbol *res; + JitOptSymbol *value; + JitOptSymbol *res; value = stack_pointer[-1]; if (!optimize_to_bool(this_instr, ctx, value, &res)) { sym_set_type(value, &PyLong_Type); @@ -174,8 +182,8 @@ } case _TO_BOOL_LIST: { - _Py_UopsSymbol *value; - _Py_UopsSymbol *res; + JitOptSymbol *value; + JitOptSymbol *res; value = stack_pointer[-1]; if (!optimize_to_bool(this_instr, ctx, value, &res)) { sym_set_type(value, &PyList_Type); @@ -186,8 +194,8 @@ } case _TO_BOOL_NONE: { - _Py_UopsSymbol *value; - _Py_UopsSymbol *res; + JitOptSymbol *value; + JitOptSymbol *res; value = stack_pointer[-1]; if (!optimize_to_bool(this_instr, ctx, value, &res)) { sym_set_const(value, Py_None); @@ -198,8 +206,8 @@ } case _TO_BOOL_STR: { - _Py_UopsSymbol *value; - _Py_UopsSymbol *res; + JitOptSymbol *value; + JitOptSymbol *res; value = 
stack_pointer[-1]; if (!optimize_to_bool(this_instr, ctx, value, &res)) { res = sym_new_type(ctx, &PyBool_Type); @@ -210,22 +218,22 @@ } case _REPLACE_WITH_TRUE: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_const(ctx, Py_True); stack_pointer[-1] = res; break; } case _UNARY_INVERT: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-1] = res; break; } case _GUARD_BOTH_INT: { - _Py_UopsSymbol *right; - _Py_UopsSymbol *left; + JitOptSymbol *right; + JitOptSymbol *left; right = stack_pointer[-1]; left = stack_pointer[-2]; if (sym_matches_type(left, &PyLong_Type)) { @@ -255,9 +263,9 @@ } case _BINARY_OP_MULTIPLY_INT: { - _Py_UopsSymbol *right; - _Py_UopsSymbol *left; - _Py_UopsSymbol *res; + JitOptSymbol *right; + JitOptSymbol *left; + JitOptSymbol *res; right = stack_pointer[-1]; left = stack_pointer[-2]; if (sym_is_const(left) && sym_is_const(right) && @@ -271,23 +279,26 @@ goto error; } res = sym_new_const(ctx, temp); + stack_pointer[-2] = res; + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); Py_DECREF(temp); // TODO gh-115506: // replace opcode with constant propagated one and add tests! } else { res = sym_new_type(ctx, &PyLong_Type); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); } - stack_pointer[-2] = res; - stack_pointer += -1; - assert(WITHIN_STACK_BOUNDS()); + stack_pointer[-1] = res; break; } case _BINARY_OP_ADD_INT: { - _Py_UopsSymbol *right; - _Py_UopsSymbol *left; - _Py_UopsSymbol *res; + JitOptSymbol *right; + JitOptSymbol *left; + JitOptSymbol *res; right = stack_pointer[-1]; left = stack_pointer[-2]; if (sym_is_const(left) && sym_is_const(right) && @@ -301,23 +312,26 @@ goto error; } res = sym_new_const(ctx, temp); + stack_pointer[-2] = res; + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); Py_DECREF(temp); // TODO gh-115506: // replace opcode with constant propagated one and add tests! } else { res = sym_new_type(ctx, &PyLong_Type); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); } - stack_pointer[-2] = res; - stack_pointer += -1; - assert(WITHIN_STACK_BOUNDS()); + stack_pointer[-1] = res; break; } case _BINARY_OP_SUBTRACT_INT: { - _Py_UopsSymbol *right; - _Py_UopsSymbol *left; - _Py_UopsSymbol *res; + JitOptSymbol *right; + JitOptSymbol *left; + JitOptSymbol *res; right = stack_pointer[-1]; left = stack_pointer[-2]; if (sym_is_const(left) && sym_is_const(right) && @@ -331,22 +345,25 @@ goto error; } res = sym_new_const(ctx, temp); + stack_pointer[-2] = res; + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); Py_DECREF(temp); // TODO gh-115506: // replace opcode with constant propagated one and add tests! 
} else { res = sym_new_type(ctx, &PyLong_Type); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); } - stack_pointer[-2] = res; - stack_pointer += -1; - assert(WITHIN_STACK_BOUNDS()); + stack_pointer[-1] = res; break; } case _GUARD_BOTH_FLOAT: { - _Py_UopsSymbol *right; - _Py_UopsSymbol *left; + JitOptSymbol *right; + JitOptSymbol *left; right = stack_pointer[-1]; left = stack_pointer[-2]; if (sym_matches_type(left, &PyFloat_Type)) { @@ -376,9 +393,9 @@ } case _BINARY_OP_MULTIPLY_FLOAT: { - _Py_UopsSymbol *right; - _Py_UopsSymbol *left; - _Py_UopsSymbol *res; + JitOptSymbol *right; + JitOptSymbol *left; + JitOptSymbol *res; right = stack_pointer[-1]; left = stack_pointer[-2]; if (sym_is_const(left) && sym_is_const(right) && @@ -393,23 +410,26 @@ goto error; } res = sym_new_const(ctx, temp); + stack_pointer[-2] = res; + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); Py_DECREF(temp); // TODO gh-115506: // replace opcode with constant propagated one and update tests! } else { res = sym_new_type(ctx, &PyFloat_Type); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); } - stack_pointer[-2] = res; - stack_pointer += -1; - assert(WITHIN_STACK_BOUNDS()); + stack_pointer[-1] = res; break; } case _BINARY_OP_ADD_FLOAT: { - _Py_UopsSymbol *right; - _Py_UopsSymbol *left; - _Py_UopsSymbol *res; + JitOptSymbol *right; + JitOptSymbol *left; + JitOptSymbol *res; right = stack_pointer[-1]; left = stack_pointer[-2]; if (sym_is_const(left) && sym_is_const(right) && @@ -424,23 +444,26 @@ goto error; } res = sym_new_const(ctx, temp); + stack_pointer[-2] = res; + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); Py_DECREF(temp); // TODO gh-115506: // replace opcode with constant propagated one and update tests! } else { res = sym_new_type(ctx, &PyFloat_Type); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); } - stack_pointer[-2] = res; - stack_pointer += -1; - assert(WITHIN_STACK_BOUNDS()); + stack_pointer[-1] = res; break; } case _BINARY_OP_SUBTRACT_FLOAT: { - _Py_UopsSymbol *right; - _Py_UopsSymbol *left; - _Py_UopsSymbol *res; + JitOptSymbol *right; + JitOptSymbol *left; + JitOptSymbol *res; right = stack_pointer[-1]; left = stack_pointer[-2]; if (sym_is_const(left) && sym_is_const(right) && @@ -455,22 +478,25 @@ goto error; } res = sym_new_const(ctx, temp); + stack_pointer[-2] = res; + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); Py_DECREF(temp); // TODO gh-115506: // replace opcode with constant propagated one and update tests! 
} else { res = sym_new_type(ctx, &PyFloat_Type); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); } - stack_pointer[-2] = res; - stack_pointer += -1; - assert(WITHIN_STACK_BOUNDS()); + stack_pointer[-1] = res; break; } case _GUARD_BOTH_UNICODE: { - _Py_UopsSymbol *right; - _Py_UopsSymbol *left; + JitOptSymbol *right; + JitOptSymbol *left; right = stack_pointer[-1]; left = stack_pointer[-2]; if (sym_matches_type(left, &PyUnicode_Type) && @@ -483,9 +509,9 @@ } case _BINARY_OP_ADD_UNICODE: { - _Py_UopsSymbol *right; - _Py_UopsSymbol *left; - _Py_UopsSymbol *res; + JitOptSymbol *right; + JitOptSymbol *left; + JitOptSymbol *res; right = stack_pointer[-1]; left = stack_pointer[-2]; if (sym_is_const(left) && sym_is_const(right) && @@ -495,23 +521,26 @@ goto error; } res = sym_new_const(ctx, temp); + stack_pointer[-2] = res; + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); Py_DECREF(temp); } else { res = sym_new_type(ctx, &PyUnicode_Type); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); } - stack_pointer[-2] = res; - stack_pointer += -1; - assert(WITHIN_STACK_BOUNDS()); + stack_pointer[-1] = res; break; } case _BINARY_OP_INPLACE_ADD_UNICODE: { - _Py_UopsSymbol *right; - _Py_UopsSymbol *left; + JitOptSymbol *right; + JitOptSymbol *left; right = stack_pointer[-1]; left = stack_pointer[-2]; - _Py_UopsSymbol *res; + JitOptSymbol *res; if (sym_is_const(left) && sym_is_const(right) && sym_matches_type(left, &PyUnicode_Type) && sym_matches_type(right, &PyUnicode_Type)) { PyObject *temp = PyUnicode_Concat(sym_get_const(left), sym_get_const(right)); @@ -519,20 +548,35 @@ goto error; } res = sym_new_const(ctx, temp); + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); Py_DECREF(temp); } else { res = sym_new_type(ctx, &PyUnicode_Type); + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); } // _STORE_FAST: GETLOCAL(this_instr->operand0) = res; - stack_pointer += -2; + break; + } + + case _GUARD_BINARY_OP_EXTEND: { + break; + } + + case _BINARY_OP_EXTEND: { + JitOptSymbol *res; + res = sym_new_not_null(ctx); + stack_pointer[-2] = res; + stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); break; } case _BINARY_SUBSCR: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-2] = res; stack_pointer += -1; @@ -541,7 +585,7 @@ } case _BINARY_SLICE: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-3] = res; stack_pointer += -2; @@ -556,7 +600,7 @@ } case _BINARY_SUBSCR_LIST_INT: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-2] = res; stack_pointer += -1; @@ -565,7 +609,7 @@ } case _BINARY_SUBSCR_STR_INT: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-2] = res; stack_pointer += -1; @@ -574,7 +618,7 @@ } case _BINARY_SUBSCR_TUPLE_INT: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-2] = res; stack_pointer += -1; @@ -583,7 +627,7 @@ } case _BINARY_SUBSCR_DICT: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-2] = res; stack_pointer += -1; @@ -592,7 +636,7 @@ } case _BINARY_SUBSCR_CHECK_FUNC: { - _Py_UopsSymbol *getitem; + JitOptSymbol *getitem; getitem = sym_new_not_null(ctx); stack_pointer[0] = getitem; stack_pointer += 1; @@ -601,9 +645,9 @@ } case _BINARY_SUBSCR_INIT_CALL: { - _Py_UopsSymbol *getitem; - _Py_UopsSymbol *sub; - _Py_UopsSymbol *container; + JitOptSymbol *getitem; + JitOptSymbol *sub; + JitOptSymbol *container; _Py_UOpsAbstractFrame 
*new_frame; getitem = stack_pointer[-1]; sub = stack_pointer[-2]; @@ -613,7 +657,7 @@ (void)getitem; new_frame = NULL; ctx->done = true; - stack_pointer[-3] = (_Py_UopsSymbol *)new_frame; + stack_pointer[-3] = (JitOptSymbol *)new_frame; stack_pointer += -2; assert(WITHIN_STACK_BOUNDS()); break; @@ -656,14 +700,14 @@ } case _CALL_INTRINSIC_1: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-1] = res; break; } case _CALL_INTRINSIC_2: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-2] = res; stack_pointer += -1; @@ -672,8 +716,8 @@ } case _RETURN_VALUE: { - _Py_UopsSymbol *retval; - _Py_UopsSymbol *res; + JitOptSymbol *retval; + JitOptSymbol *res; retval = stack_pointer[-1]; stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); @@ -700,14 +744,14 @@ } case _GET_AITER: { - _Py_UopsSymbol *iter; + JitOptSymbol *iter; iter = sym_new_not_null(ctx); stack_pointer[-1] = iter; break; } case _GET_ANEXT: { - _Py_UopsSymbol *awaitable; + JitOptSymbol *awaitable; awaitable = sym_new_not_null(ctx); stack_pointer[0] = awaitable; stack_pointer += 1; @@ -716,7 +760,7 @@ } case _GET_AWAITABLE: { - _Py_UopsSymbol *iter; + JitOptSymbol *iter; iter = sym_new_not_null(ctx); stack_pointer[-1] = iter; break; @@ -731,7 +775,7 @@ } case _YIELD_VALUE: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_unknown(ctx); stack_pointer[-1] = res; break; @@ -744,7 +788,7 @@ } case _LOAD_COMMON_CONSTANT: { - _Py_UopsSymbol *value; + JitOptSymbol *value; value = sym_new_not_null(ctx); stack_pointer[0] = value; stack_pointer += 1; @@ -753,7 +797,7 @@ } case _LOAD_BUILD_CLASS: { - _Py_UopsSymbol *bc; + JitOptSymbol *bc; bc = sym_new_not_null(ctx); stack_pointer[0] = bc; stack_pointer += 1; @@ -772,8 +816,8 @@ } case _UNPACK_SEQUENCE: { - _Py_UopsSymbol *seq; - _Py_UopsSymbol **values; + JitOptSymbol *seq; + JitOptSymbol **values; seq = stack_pointer[-1]; values = &stack_pointer[-1]; /* This has to be done manually */ @@ -787,10 +831,12 @@ } case _UNPACK_SEQUENCE_TWO_TUPLE: { - _Py_UopsSymbol *val1; - _Py_UopsSymbol *val0; - val1 = sym_new_not_null(ctx); - val0 = sym_new_not_null(ctx); + JitOptSymbol *seq; + JitOptSymbol *val1; + JitOptSymbol *val0; + seq = stack_pointer[-1]; + val0 = sym_tuple_getitem(ctx, seq, 0); + val1 = sym_tuple_getitem(ctx, seq, 1); stack_pointer[-1] = val1; stack_pointer[0] = val0; stack_pointer += 1; @@ -799,10 +845,12 @@ } case _UNPACK_SEQUENCE_TUPLE: { - _Py_UopsSymbol **values; + JitOptSymbol *seq; + JitOptSymbol **values; + seq = stack_pointer[-1]; values = &stack_pointer[-1]; - for (int _i = oparg; --_i >= 0;) { - values[_i] = sym_new_not_null(ctx); + for (int i = 0; i < oparg; i++) { + values[i] = sym_tuple_getitem(ctx, seq, i); } stack_pointer += -1 + oparg; assert(WITHIN_STACK_BOUNDS()); @@ -810,7 +858,7 @@ } case _UNPACK_SEQUENCE_LIST: { - _Py_UopsSymbol **values; + JitOptSymbol **values; values = &stack_pointer[-1]; for (int _i = oparg; --_i >= 0;) { values[_i] = sym_new_not_null(ctx); @@ -821,8 +869,8 @@ } case _UNPACK_EX: { - _Py_UopsSymbol *seq; - _Py_UopsSymbol **values; + JitOptSymbol *seq; + JitOptSymbol **values; seq = stack_pointer[-1]; values = &stack_pointer[-1]; /* This has to be done manually */ @@ -859,7 +907,7 @@ } case _LOAD_LOCALS: { - _Py_UopsSymbol *locals; + JitOptSymbol *locals; locals = sym_new_not_null(ctx); stack_pointer[0] = locals; stack_pointer += 1; @@ -870,7 +918,7 @@ /* _LOAD_FROM_DICT_OR_GLOBALS is not a viable micro-op for tier 2 */ case _LOAD_NAME: { - _Py_UopsSymbol *v; + 
JitOptSymbol *v; v = sym_new_not_null(ctx); stack_pointer[0] = v; stack_pointer += 1; @@ -879,13 +927,21 @@ } case _LOAD_GLOBAL: { - _Py_UopsSymbol **res; - _Py_UopsSymbol *null = NULL; + JitOptSymbol **res; res = &stack_pointer[0]; res[0] = sym_new_not_null(ctx); + stack_pointer += 1; + assert(WITHIN_STACK_BOUNDS()); + break; + } + + case _PUSH_NULL_CONDITIONAL: { + JitOptSymbol *null = NULL; + int opcode = (oparg & 1) ? _PUSH_NULL : _NOP; + REPLACE_OP(this_instr, opcode, 0, 0); null = sym_new_null(ctx); - if (oparg & 1) stack_pointer[1] = null; - stack_pointer += 1 + (oparg & 1); + if (oparg & 1) stack_pointer[0] = null; + stack_pointer += (oparg & 1); assert(WITHIN_STACK_BOUNDS()); break; } @@ -895,7 +951,7 @@ } case _GUARD_GLOBALS_VERSION_PUSH_KEYS: { - _Py_UopsSymbol *globals_keys; + JitOptSymbol *globals_keys; uint16_t version = (uint16_t)this_instr->operand0; globals_keys = sym_new_unknown(ctx); (void)version; @@ -906,7 +962,7 @@ } case _GUARD_BUILTINS_VERSION_PUSH_KEYS: { - _Py_UopsSymbol *builtins_keys; + JitOptSymbol *builtins_keys; uint16_t version = (uint16_t)this_instr->operand0; builtins_keys = sym_new_unknown(ctx); (void)version; @@ -917,26 +973,16 @@ } case _LOAD_GLOBAL_MODULE_FROM_KEYS: { - _Py_UopsSymbol *res; - _Py_UopsSymbol *null = NULL; + JitOptSymbol *res; res = sym_new_not_null(ctx); - null = sym_new_null(ctx); stack_pointer[-1] = res; - if (oparg & 1) stack_pointer[0] = null; - stack_pointer += (oparg & 1); - assert(WITHIN_STACK_BOUNDS()); break; } case _LOAD_GLOBAL_BUILTINS_FROM_KEYS: { - _Py_UopsSymbol *res; - _Py_UopsSymbol *null = NULL; + JitOptSymbol *res; res = sym_new_not_null(ctx); - null = sym_new_null(ctx); stack_pointer[-1] = res; - if (oparg & 1) stack_pointer[0] = null; - stack_pointer += (oparg & 1); - assert(WITHIN_STACK_BOUNDS()); break; } @@ -953,14 +999,14 @@ } case _LOAD_FROM_DICT_OR_DEREF: { - _Py_UopsSymbol *value; + JitOptSymbol *value; value = sym_new_not_null(ctx); stack_pointer[-1] = value; break; } case _LOAD_DEREF: { - _Py_UopsSymbol *value; + JitOptSymbol *value; value = sym_new_not_null(ctx); stack_pointer[0] = value; stack_pointer += 1; @@ -979,7 +1025,7 @@ } case _BUILD_STRING: { - _Py_UopsSymbol *str; + JitOptSymbol *str; str = sym_new_not_null(ctx); stack_pointer[-oparg] = str; stack_pointer += 1 - oparg; @@ -988,8 +1034,10 @@ } case _BUILD_TUPLE: { - _Py_UopsSymbol *tup; - tup = sym_new_not_null(ctx); + JitOptSymbol **values; + JitOptSymbol *tup; + values = &stack_pointer[-oparg]; + tup = sym_new_tuple(ctx, oparg, values); stack_pointer[-oparg] = tup; stack_pointer += 1 - oparg; assert(WITHIN_STACK_BOUNDS()); @@ -997,7 +1045,7 @@ } case _BUILD_LIST: { - _Py_UopsSymbol *list; + JitOptSymbol *list; list = sym_new_not_null(ctx); stack_pointer[-oparg] = list; stack_pointer += 1 - oparg; @@ -1018,7 +1066,7 @@ } case _BUILD_SET: { - _Py_UopsSymbol *set; + JitOptSymbol *set; set = sym_new_not_null(ctx); stack_pointer[-oparg] = set; stack_pointer += 1 - oparg; @@ -1027,7 +1075,7 @@ } case _BUILD_MAP: { - _Py_UopsSymbol *map; + JitOptSymbol *map; map = sym_new_not_null(ctx); stack_pointer[-oparg*2] = map; stack_pointer += 1 - oparg*2; @@ -1060,7 +1108,7 @@ /* _INSTRUMENTED_LOAD_SUPER_ATTR is not a viable micro-op for tier 2 */ case _LOAD_SUPER_ATTR_ATTR: { - _Py_UopsSymbol *attr_st; + JitOptSymbol *attr_st; attr_st = sym_new_not_null(ctx); stack_pointer[-3] = attr_st; stack_pointer += -2; @@ -1069,8 +1117,8 @@ } case _LOAD_SUPER_ATTR_METHOD: { - _Py_UopsSymbol *attr; - _Py_UopsSymbol *self_or_null; + JitOptSymbol *attr; + JitOptSymbol 
*self_or_null; attr = sym_new_not_null(ctx); self_or_null = sym_new_not_null(ctx); stack_pointer[-3] = attr; @@ -1081,22 +1129,24 @@ } case _LOAD_ATTR: { - _Py_UopsSymbol *owner; - _Py_UopsSymbol *attr; - _Py_UopsSymbol *self_or_null = NULL; + JitOptSymbol *owner; + JitOptSymbol *attr; + JitOptSymbol **self_or_null; owner = stack_pointer[-1]; + self_or_null = &stack_pointer[0]; (void)owner; attr = sym_new_not_null(ctx); - self_or_null = sym_new_unknown(ctx); + if (oparg &1) { + self_or_null[0] = sym_new_unknown(ctx); + } stack_pointer[-1] = attr; - if (oparg & 1) stack_pointer[0] = self_or_null; - stack_pointer += (oparg & 1); + stack_pointer += (oparg&1); assert(WITHIN_STACK_BOUNDS()); break; } case _GUARD_TYPE_VERSION: { - _Py_UopsSymbol *owner; + JitOptSymbol *owner; owner = stack_pointer[-1]; uint32_t type_version = (uint32_t)this_instr->operand0; assert(type_version); @@ -1130,25 +1180,20 @@ } case _LOAD_ATTR_INSTANCE_VALUE: { - _Py_UopsSymbol *owner; - _Py_UopsSymbol *attr; - _Py_UopsSymbol *null = NULL; + JitOptSymbol *owner; + JitOptSymbol *attr; owner = stack_pointer[-1]; uint16_t offset = (uint16_t)this_instr->operand0; attr = sym_new_not_null(ctx); - null = sym_new_null(ctx); (void)offset; (void)owner; stack_pointer[-1] = attr; - if (oparg & 1) stack_pointer[0] = null; - stack_pointer += (oparg & 1); - assert(WITHIN_STACK_BOUNDS()); break; } case _CHECK_ATTR_MODULE_PUSH_KEYS: { - _Py_UopsSymbol *owner; - _Py_UopsSymbol *mod_keys; + JitOptSymbol *owner; + JitOptSymbol *mod_keys; owner = stack_pointer[-1]; uint32_t dict_version = (uint32_t)this_instr->operand0; (void)dict_version; @@ -1178,13 +1223,11 @@ } case _LOAD_ATTR_MODULE_FROM_KEYS: { - _Py_UopsSymbol *owner; - _Py_UopsSymbol *attr; - _Py_UopsSymbol *null = NULL; + JitOptSymbol *owner; + JitOptSymbol *attr; owner = stack_pointer[-2]; uint16_t index = (uint16_t)this_instr->operand0; (void)index; - null = sym_new_null(ctx); attr = NULL; if (this_instr[-1].opcode == _NOP) { // Preceding _CHECK_ATTR_MODULE_PUSH_KEYS was removed: mod is const and dict is watched. 
@@ -1193,8 +1236,7 @@ assert(PyModule_CheckExact(mod)); PyObject *dict = mod->md_dict; stack_pointer[-2] = attr; - if (oparg & 1) stack_pointer[-1] = null; - stack_pointer += -1 + (oparg & 1); + stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); PyObject *res = convert_global_to_const(this_instr, dict); if (res != NULL) { @@ -1204,7 +1246,7 @@ else { this_instr->opcode = _LOAD_ATTR_MODULE; } - stack_pointer += 1 - (oparg & 1); + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); } if (attr == NULL) { @@ -1212,47 +1254,49 @@ attr = sym_new_not_null(ctx); } stack_pointer[-2] = attr; - if (oparg & 1) stack_pointer[-1] = null; - stack_pointer += -1 + (oparg & 1); + stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); break; } case _CHECK_ATTR_WITH_HINT: { + JitOptSymbol *owner; + JitOptSymbol *dict; + owner = stack_pointer[-1]; + dict = sym_new_not_null(ctx); + (void)owner; + stack_pointer[0] = dict; + stack_pointer += 1; + assert(WITHIN_STACK_BOUNDS()); break; } case _LOAD_ATTR_WITH_HINT: { - _Py_UopsSymbol *owner; - _Py_UopsSymbol *attr; - _Py_UopsSymbol *null = NULL; - owner = stack_pointer[-1]; + JitOptSymbol *dict; + JitOptSymbol *owner; + JitOptSymbol *attr; + dict = stack_pointer[-1]; + owner = stack_pointer[-2]; uint16_t hint = (uint16_t)this_instr->operand0; attr = sym_new_not_null(ctx); - null = sym_new_null(ctx); (void)hint; (void)owner; - stack_pointer[-1] = attr; - if (oparg & 1) stack_pointer[0] = null; - stack_pointer += (oparg & 1); + (void)dict; + stack_pointer[-2] = attr; + stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); break; } case _LOAD_ATTR_SLOT: { - _Py_UopsSymbol *owner; - _Py_UopsSymbol *attr; - _Py_UopsSymbol *null = NULL; + JitOptSymbol *owner; + JitOptSymbol *attr; owner = stack_pointer[-1]; uint16_t index = (uint16_t)this_instr->operand0; attr = sym_new_not_null(ctx); - null = sym_new_null(ctx); (void)index; (void)owner; stack_pointer[-1] = attr; - if (oparg & 1) stack_pointer[0] = null; - stack_pointer += (oparg & 1); - assert(WITHIN_STACK_BOUNDS()); break; } @@ -1261,24 +1305,19 @@ } case _LOAD_ATTR_CLASS: { - _Py_UopsSymbol *owner; - _Py_UopsSymbol *attr; - _Py_UopsSymbol *null = NULL; + JitOptSymbol *owner; + JitOptSymbol *attr; owner = stack_pointer[-1]; PyObject *descr = (PyObject *)this_instr->operand0; attr = sym_new_not_null(ctx); - null = sym_new_null(ctx); (void)descr; (void)owner; stack_pointer[-1] = attr; - if (oparg & 1) stack_pointer[0] = null; - stack_pointer += (oparg & 1); - assert(WITHIN_STACK_BOUNDS()); break; } case _LOAD_ATTR_PROPERTY_FRAME: { - _Py_UopsSymbol *owner; + JitOptSymbol *owner; _Py_UOpsAbstractFrame *new_frame; owner = stack_pointer[-1]; PyObject *fget = (PyObject *)this_instr->operand0; @@ -1286,7 +1325,7 @@ (void)owner; new_frame = NULL; ctx->done = true; - stack_pointer[-1] = (_Py_UopsSymbol *)new_frame; + stack_pointer[-1] = (JitOptSymbol *)new_frame; break; } @@ -1315,9 +1354,9 @@ } case _COMPARE_OP: { - _Py_UopsSymbol *right; - _Py_UopsSymbol *left; - _Py_UopsSymbol *res; + JitOptSymbol *right; + JitOptSymbol *left; + JitOptSymbol *res; right = stack_pointer[-1]; left = stack_pointer[-2]; (void)left; @@ -1339,9 +1378,9 @@ } case _COMPARE_OP_FLOAT: { - _Py_UopsSymbol *right; - _Py_UopsSymbol *left; - _Py_UopsSymbol *res; + JitOptSymbol *right; + JitOptSymbol *left; + JitOptSymbol *res; right = stack_pointer[-1]; left = stack_pointer[-2]; (void)left; @@ -1354,9 +1393,9 @@ } case _COMPARE_OP_INT: { - _Py_UopsSymbol *right; - _Py_UopsSymbol *left; - _Py_UopsSymbol *res; + JitOptSymbol *right; + JitOptSymbol *left; + 
JitOptSymbol *res; right = stack_pointer[-1]; left = stack_pointer[-2]; (void)left; @@ -1369,9 +1408,9 @@ } case _COMPARE_OP_STR: { - _Py_UopsSymbol *right; - _Py_UopsSymbol *left; - _Py_UopsSymbol *res; + JitOptSymbol *right; + JitOptSymbol *left; + JitOptSymbol *res; right = stack_pointer[-1]; left = stack_pointer[-2]; (void)left; @@ -1384,9 +1423,9 @@ } case _IS_OP: { - _Py_UopsSymbol *right; - _Py_UopsSymbol *left; - _Py_UopsSymbol *res; + JitOptSymbol *right; + JitOptSymbol *left; + JitOptSymbol *res; right = stack_pointer[-1]; left = stack_pointer[-2]; (void)left; @@ -1399,9 +1438,9 @@ } case _CONTAINS_OP: { - _Py_UopsSymbol *right; - _Py_UopsSymbol *left; - _Py_UopsSymbol *res; + JitOptSymbol *right; + JitOptSymbol *left; + JitOptSymbol *res; right = stack_pointer[-1]; left = stack_pointer[-2]; (void)left; @@ -1414,7 +1453,7 @@ } case _CONTAINS_OP_SET: { - _Py_UopsSymbol *b; + JitOptSymbol *b; b = sym_new_not_null(ctx); stack_pointer[-2] = b; stack_pointer += -1; @@ -1423,7 +1462,7 @@ } case _CONTAINS_OP_DICT: { - _Py_UopsSymbol *b; + JitOptSymbol *b; b = sym_new_not_null(ctx); stack_pointer[-2] = b; stack_pointer += -1; @@ -1432,8 +1471,8 @@ } case _CHECK_EG_MATCH: { - _Py_UopsSymbol *rest; - _Py_UopsSymbol *match; + JitOptSymbol *rest; + JitOptSymbol *match; rest = sym_new_not_null(ctx); match = sym_new_not_null(ctx); stack_pointer[-2] = rest; @@ -1442,14 +1481,14 @@ } case _CHECK_EXC_MATCH: { - _Py_UopsSymbol *b; + JitOptSymbol *b; b = sym_new_not_null(ctx); stack_pointer[-1] = b; break; } case _IMPORT_NAME: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-2] = res; stack_pointer += -1; @@ -1458,7 +1497,7 @@ } case _IMPORT_FROM: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[0] = res; stack_pointer += 1; @@ -1471,14 +1510,14 @@ /* _POP_JUMP_IF_TRUE is not a viable micro-op for tier 2 */ case _IS_NONE: { - _Py_UopsSymbol *b; + JitOptSymbol *b; b = sym_new_not_null(ctx); stack_pointer[-1] = b; break; } case _GET_LEN: { - _Py_UopsSymbol *len; + JitOptSymbol *len; len = sym_new_not_null(ctx); stack_pointer[0] = len; stack_pointer += 1; @@ -1487,7 +1526,7 @@ } case _MATCH_CLASS: { - _Py_UopsSymbol *attrs; + JitOptSymbol *attrs; attrs = sym_new_not_null(ctx); stack_pointer[-3] = attrs; stack_pointer += -2; @@ -1496,7 +1535,7 @@ } case _MATCH_MAPPING: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[0] = res; stack_pointer += 1; @@ -1505,7 +1544,7 @@ } case _MATCH_SEQUENCE: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[0] = res; stack_pointer += 1; @@ -1514,7 +1553,7 @@ } case _MATCH_KEYS: { - _Py_UopsSymbol *values_or_none; + JitOptSymbol *values_or_none; values_or_none = sym_new_not_null(ctx); stack_pointer[0] = values_or_none; stack_pointer += 1; @@ -1523,14 +1562,14 @@ } case _GET_ITER: { - _Py_UopsSymbol *iter; + JitOptSymbol *iter; iter = sym_new_not_null(ctx); stack_pointer[-1] = iter; break; } case _GET_YIELD_FROM_ITER: { - _Py_UopsSymbol *iter; + JitOptSymbol *iter; iter = sym_new_not_null(ctx); stack_pointer[-1] = iter; break; @@ -1539,7 +1578,7 @@ /* _FOR_ITER is not a viable micro-op for tier 2 */ case _FOR_ITER_TIER_TWO: { - _Py_UopsSymbol *next; + JitOptSymbol *next; next = sym_new_not_null(ctx); stack_pointer[0] = next; stack_pointer += 1; @@ -1560,7 +1599,7 @@ } case _ITER_NEXT_LIST: { - _Py_UopsSymbol *next; + JitOptSymbol *next; next = sym_new_not_null(ctx); stack_pointer[0] = next; stack_pointer += 1; @@ 
-1579,7 +1618,7 @@ } case _ITER_NEXT_TUPLE: { - _Py_UopsSymbol *next; + JitOptSymbol *next; next = sym_new_not_null(ctx); stack_pointer[0] = next; stack_pointer += 1; @@ -1598,8 +1637,8 @@ } case _ITER_NEXT_RANGE: { - _Py_UopsSymbol *iter; - _Py_UopsSymbol *next; + JitOptSymbol *iter; + JitOptSymbol *next; iter = stack_pointer[-1]; next = sym_new_type(ctx, &PyLong_Type); (void)iter; @@ -1616,9 +1655,9 @@ } case _LOAD_SPECIAL: { - _Py_UopsSymbol *owner; - _Py_UopsSymbol *attr; - _Py_UopsSymbol *self_or_null; + JitOptSymbol *owner; + JitOptSymbol *attr; + JitOptSymbol *self_or_null; owner = stack_pointer[-1]; (void)owner; attr = sym_new_not_null(ctx); @@ -1631,7 +1670,7 @@ } case _WITH_EXCEPT_START: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[0] = res; stack_pointer += 1; @@ -1640,8 +1679,8 @@ } case _PUSH_EXC_INFO: { - _Py_UopsSymbol *prev_exc; - _Py_UopsSymbol *new_exc; + JitOptSymbol *prev_exc; + JitOptSymbol *new_exc; prev_exc = sym_new_not_null(ctx); new_exc = sym_new_not_null(ctx); stack_pointer[-1] = prev_exc; @@ -1660,9 +1699,9 @@ } case _LOAD_ATTR_METHOD_WITH_VALUES: { - _Py_UopsSymbol *owner; - _Py_UopsSymbol *attr; - _Py_UopsSymbol *self = NULL; + JitOptSymbol *owner; + JitOptSymbol *attr; + JitOptSymbol *self; owner = stack_pointer[-1]; PyObject *descr = (PyObject *)this_instr->operand0; (void)descr; @@ -1676,9 +1715,9 @@ } case _LOAD_ATTR_METHOD_NO_DICT: { - _Py_UopsSymbol *owner; - _Py_UopsSymbol *attr; - _Py_UopsSymbol *self = NULL; + JitOptSymbol *owner; + JitOptSymbol *attr; + JitOptSymbol *self; owner = stack_pointer[-1]; PyObject *descr = (PyObject *)this_instr->operand0; (void)descr; @@ -1692,14 +1731,14 @@ } case _LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES: { - _Py_UopsSymbol *attr; + JitOptSymbol *attr; attr = sym_new_not_null(ctx); stack_pointer[-1] = attr; break; } case _LOAD_ATTR_NONDESCRIPTOR_NO_DICT: { - _Py_UopsSymbol *attr; + JitOptSymbol *attr; attr = sym_new_not_null(ctx); stack_pointer[-1] = attr; break; @@ -1710,9 +1749,9 @@ } case _LOAD_ATTR_METHOD_LAZY_DICT: { - _Py_UopsSymbol *owner; - _Py_UopsSymbol *attr; - _Py_UopsSymbol *self = NULL; + JitOptSymbol *owner; + JitOptSymbol *attr; + JitOptSymbol *self; owner = stack_pointer[-1]; PyObject *descr = (PyObject *)this_instr->operand0; (void)descr; @@ -1726,11 +1765,11 @@ } case _MAYBE_EXPAND_METHOD: { - _Py_UopsSymbol **args; - _Py_UopsSymbol *self_or_null; - _Py_UopsSymbol *callable; - _Py_UopsSymbol *func; - _Py_UopsSymbol *maybe_self; + JitOptSymbol **args; + JitOptSymbol *self_or_null; + JitOptSymbol *callable; + JitOptSymbol *func; + JitOptSymbol *maybe_self; args = &stack_pointer[-oparg]; self_or_null = stack_pointer[-1 - oparg]; callable = stack_pointer[-2 - oparg]; @@ -1750,8 +1789,8 @@ /* _MONITOR_CALL is not a viable micro-op for tier 2 */ case _PY_FRAME_GENERAL: { - _Py_UopsSymbol *self_or_null; - _Py_UopsSymbol *callable; + JitOptSymbol *self_or_null; + JitOptSymbol *callable; _Py_UOpsAbstractFrame *new_frame; self_or_null = stack_pointer[-1 - oparg]; callable = stack_pointer[-2 - oparg]; @@ -1767,15 +1806,15 @@ break; } new_frame = frame_new(ctx, co, 0, NULL, 0); - stack_pointer[0] = (_Py_UopsSymbol *)new_frame; + stack_pointer[0] = (JitOptSymbol *)new_frame; stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); break; } case _CHECK_FUNCTION_VERSION: { - _Py_UopsSymbol *self_or_null; - _Py_UopsSymbol *callable; + JitOptSymbol *self_or_null; + JitOptSymbol *callable; self_or_null = stack_pointer[-1 - oparg]; callable = stack_pointer[-2 - oparg]; uint32_t 
func_version = (uint32_t)this_instr->operand0; @@ -1798,12 +1837,6 @@ } case _EXPAND_METHOD: { - _Py_UopsSymbol **method; - _Py_UopsSymbol **self; - method = &stack_pointer[-2 - oparg]; - self = &stack_pointer[-1 - oparg]; - method[0] = sym_new_not_null(ctx); - self[0] = sym_new_not_null(ctx); break; } @@ -1812,7 +1845,7 @@ } case _CALL_NON_PY_GENERAL: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-2 - oparg] = res; stack_pointer += -1 - oparg; @@ -1821,8 +1854,8 @@ } case _CHECK_CALL_BOUND_METHOD_EXACT_ARGS: { - _Py_UopsSymbol *null; - _Py_UopsSymbol *callable; + JitOptSymbol *null; + JitOptSymbol *callable; null = stack_pointer[-1 - oparg]; callable = stack_pointer[-2 - oparg]; sym_set_null(null); @@ -1831,15 +1864,13 @@ } case _INIT_CALL_BOUND_METHOD_EXACT_ARGS: { - _Py_UopsSymbol *callable; - _Py_UopsSymbol *func; - _Py_UopsSymbol *self; - callable = stack_pointer[-2 - oparg]; + JitOptSymbol **self_or_null; + JitOptSymbol **callable; + self_or_null = &stack_pointer[-1 - oparg]; + callable = &stack_pointer[-2 - oparg]; (void)callable; - func = sym_new_not_null(ctx); - self = sym_new_not_null(ctx); - stack_pointer[-2 - oparg] = func; - stack_pointer[-1 - oparg] = self; + callable[0] = sym_new_not_null(ctx); + self_or_null[0] = sym_new_not_null(ctx); break; } @@ -1853,8 +1884,8 @@ } case _CHECK_FUNCTION_EXACT_ARGS: { - _Py_UopsSymbol *self_or_null; - _Py_UopsSymbol *callable; + JitOptSymbol *self_or_null; + JitOptSymbol *callable; self_or_null = stack_pointer[-1 - oparg]; callable = stack_pointer[-2 - oparg]; assert(sym_matches_type(callable, &PyFunction_Type)); @@ -1878,9 +1909,9 @@ } case _INIT_CALL_PY_EXACT_ARGS: { - _Py_UopsSymbol **args; - _Py_UopsSymbol *self_or_null; - _Py_UopsSymbol *callable; + JitOptSymbol **args; + JitOptSymbol *self_or_null; + JitOptSymbol *callable; _Py_UOpsAbstractFrame *new_frame; args = &stack_pointer[-oparg]; self_or_null = stack_pointer[-1 - oparg]; @@ -1908,7 +1939,7 @@ } else { new_frame = frame_new(ctx, co, 0, NULL, 0); } - stack_pointer[0] = (_Py_UopsSymbol *)new_frame; + stack_pointer[0] = (JitOptSymbol *)new_frame; stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); break; @@ -1953,7 +1984,7 @@ } case _CALL_TYPE_1: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-3] = res; stack_pointer += -2; @@ -1962,7 +1993,7 @@ } case _CALL_STR_1: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-3] = res; stack_pointer += -2; @@ -1971,7 +2002,7 @@ } case _CALL_TUPLE_1: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-3] = res; stack_pointer += -2; @@ -1980,11 +2011,11 @@ } case _CHECK_AND_ALLOCATE_OBJECT: { - _Py_UopsSymbol **args; - _Py_UopsSymbol *null; - _Py_UopsSymbol *callable; - _Py_UopsSymbol *self; - _Py_UopsSymbol *init; + JitOptSymbol **args; + JitOptSymbol *null; + JitOptSymbol *callable; + JitOptSymbol *self; + JitOptSymbol *init; args = &stack_pointer[-oparg]; null = stack_pointer[-1 - oparg]; callable = stack_pointer[-2 - oparg]; @@ -2002,9 +2033,9 @@ } case _CREATE_INIT_FRAME: { - _Py_UopsSymbol **args; - _Py_UopsSymbol *init; - _Py_UopsSymbol *self; + JitOptSymbol **args; + JitOptSymbol *init; + JitOptSymbol *self; _Py_UOpsAbstractFrame *init_frame; args = &stack_pointer[-oparg]; init = stack_pointer[-1 - oparg]; @@ -2014,7 +2045,7 @@ (void)args; init_frame = NULL; ctx->done = true; - stack_pointer[-2 - oparg] = (_Py_UopsSymbol *)init_frame; + stack_pointer[-2 - oparg] = 
(JitOptSymbol *)init_frame; stack_pointer += -1 - oparg; assert(WITHIN_STACK_BOUNDS()); break; @@ -2027,7 +2058,7 @@ } case _CALL_BUILTIN_CLASS: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-2 - oparg] = res; stack_pointer += -1 - oparg; @@ -2036,7 +2067,7 @@ } case _CALL_BUILTIN_O: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-2 - oparg] = res; stack_pointer += -1 - oparg; @@ -2045,7 +2076,7 @@ } case _CALL_BUILTIN_FAST: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-2 - oparg] = res; stack_pointer += -1 - oparg; @@ -2054,7 +2085,7 @@ } case _CALL_BUILTIN_FAST_WITH_KEYWORDS: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-2 - oparg] = res; stack_pointer += -1 - oparg; @@ -2063,7 +2094,7 @@ } case _CALL_LEN: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-2 - oparg] = res; stack_pointer += -1 - oparg; @@ -2072,7 +2103,7 @@ } case _CALL_ISINSTANCE: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-2 - oparg] = res; stack_pointer += -1 - oparg; @@ -2087,7 +2118,7 @@ } case _CALL_METHOD_DESCRIPTOR_O: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-2 - oparg] = res; stack_pointer += -1 - oparg; @@ -2096,7 +2127,7 @@ } case _CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-2 - oparg] = res; stack_pointer += -1 - oparg; @@ -2105,7 +2136,7 @@ } case _CALL_METHOD_DESCRIPTOR_NOARGS: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-2 - oparg] = res; stack_pointer += -1 - oparg; @@ -2114,7 +2145,7 @@ } case _CALL_METHOD_DESCRIPTOR_FAST: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-2 - oparg] = res; stack_pointer += -1 - oparg; @@ -2125,10 +2156,10 @@ /* _INSTRUMENTED_CALL_KW is not a viable micro-op for tier 2 */ case _MAYBE_EXPAND_METHOD_KW: { - _Py_UopsSymbol **func; - _Py_UopsSymbol **maybe_self; - _Py_UopsSymbol **args; - _Py_UopsSymbol *kwnames_out; + JitOptSymbol **func; + JitOptSymbol **maybe_self; + JitOptSymbol **args; + JitOptSymbol *kwnames_out; func = &stack_pointer[-3 - oparg]; maybe_self = &stack_pointer[-2 - oparg]; args = &stack_pointer[-1 - oparg]; @@ -2145,10 +2176,10 @@ /* _DO_CALL_KW is not a viable micro-op for tier 2 */ case _PY_FRAME_KW: { - _Py_UopsSymbol *kwnames; - _Py_UopsSymbol **args; - _Py_UopsSymbol *self_or_null; - _Py_UopsSymbol *callable; + JitOptSymbol *kwnames; + JitOptSymbol **args; + JitOptSymbol *self_or_null; + JitOptSymbol *callable; _Py_UOpsAbstractFrame *new_frame; kwnames = stack_pointer[-1]; args = &stack_pointer[-1 - oparg]; @@ -2160,7 +2191,7 @@ (void)kwnames; new_frame = NULL; ctx->done = true; - stack_pointer[-3 - oparg] = (_Py_UopsSymbol *)new_frame; + stack_pointer[-3 - oparg] = (JitOptSymbol *)new_frame; stack_pointer += -2 - oparg; assert(WITHIN_STACK_BOUNDS()); break; @@ -2175,12 +2206,6 @@ } case _EXPAND_METHOD_KW: { - _Py_UopsSymbol **method; - _Py_UopsSymbol **self; - method = &stack_pointer[-3 - oparg]; - self = &stack_pointer[-2 - oparg]; - method[0] = sym_new_not_null(ctx); - self[0] = sym_new_not_null(ctx); break; } @@ -2189,7 +2214,7 @@ } case _CALL_KW_NON_PY: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-3 - oparg] = res; stack_pointer 
+= -2 - oparg; @@ -2200,26 +2225,26 @@ /* _INSTRUMENTED_CALL_FUNCTION_EX is not a viable micro-op for tier 2 */ case _MAKE_CALLARGS_A_TUPLE: { - _Py_UopsSymbol *tuple; - _Py_UopsSymbol *kwargs_out = NULL; + JitOptSymbol *tuple; + JitOptSymbol *kwargs_out; tuple = sym_new_not_null(ctx); kwargs_out = sym_new_not_null(ctx); - stack_pointer[-1 - (oparg & 1)] = tuple; - if (oparg & 1) stack_pointer[-(oparg & 1)] = kwargs_out; + stack_pointer[-2] = tuple; + stack_pointer[-1] = kwargs_out; break; } /* _DO_CALL_FUNCTION_EX is not a viable micro-op for tier 2 */ case _MAKE_FUNCTION: { - _Py_UopsSymbol *func; + JitOptSymbol *func; func = sym_new_not_null(ctx); stack_pointer[-1] = func; break; } case _SET_FUNCTION_ATTRIBUTE: { - _Py_UopsSymbol *func_out; + JitOptSymbol *func_out; func_out = sym_new_not_null(ctx); stack_pointer[-2] = func_out; stack_pointer += -1; @@ -2228,7 +2253,7 @@ } case _RETURN_GENERATOR: { - _Py_UopsSymbol *res; + JitOptSymbol *res; ctx->frame->stack_pointer = stack_pointer; frame_pop(ctx); stack_pointer = ctx->frame->stack_pointer; @@ -2252,30 +2277,30 @@ } case _BUILD_SLICE: { - _Py_UopsSymbol *slice; + JitOptSymbol *slice; slice = sym_new_not_null(ctx); - stack_pointer[-2 - ((oparg == 3) ? 1 : 0)] = slice; - stack_pointer += -1 - ((oparg == 3) ? 1 : 0); + stack_pointer[-oparg] = slice; + stack_pointer += 1 - oparg; assert(WITHIN_STACK_BOUNDS()); break; } case _CONVERT_VALUE: { - _Py_UopsSymbol *result; + JitOptSymbol *result; result = sym_new_not_null(ctx); stack_pointer[-1] = result; break; } case _FORMAT_SIMPLE: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-1] = res; break; } case _FORMAT_WITH_SPEC: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-2] = res; stack_pointer += -1; @@ -2284,8 +2309,8 @@ } case _COPY: { - _Py_UopsSymbol *bottom; - _Py_UopsSymbol *top; + JitOptSymbol *bottom; + JitOptSymbol *top; bottom = stack_pointer[-1 - (oparg-1)]; assert(oparg > 0); top = bottom; @@ -2296,29 +2321,74 @@ } case _BINARY_OP: { - _Py_UopsSymbol *right; - _Py_UopsSymbol *left; - _Py_UopsSymbol *res; + JitOptSymbol *right; + JitOptSymbol *left; + JitOptSymbol *res; right = stack_pointer[-1]; left = stack_pointer[-2]; - PyTypeObject *ltype = sym_get_type(left); - PyTypeObject *rtype = sym_get_type(right); - if (ltype != NULL && (ltype == &PyLong_Type || ltype == &PyFloat_Type) && - rtype != NULL && (rtype == &PyLong_Type || rtype == &PyFloat_Type)) - { - if (oparg != NB_TRUE_DIVIDE && oparg != NB_INPLACE_TRUE_DIVIDE && - ltype == &PyLong_Type && rtype == &PyLong_Type) { - /* If both inputs are ints and the op is not division the result is an int */ - res = sym_new_type(ctx, &PyLong_Type); + bool lhs_int = sym_matches_type(left, &PyLong_Type); + bool rhs_int = sym_matches_type(right, &PyLong_Type); + bool lhs_float = sym_matches_type(left, &PyFloat_Type); + bool rhs_float = sym_matches_type(right, &PyFloat_Type); + if (!((lhs_int || lhs_float) && (rhs_int || rhs_float))) { + // There's something other than an int or float involved: + res = sym_new_unknown(ctx); + } + else { + if (oparg == NB_POWER || oparg == NB_INPLACE_POWER) { + // This one's fun... the *type* of the result depends on the + // *values* being exponentiated. 
However, exponents with one + // constant part are reasonably common, so it's probably worth + // trying to infer some simple cases: + // - A: 1 ** 1 -> 1 (int ** int -> int) + // - B: 1 ** -1 -> 1.0 (int ** int -> float) + // - C: 1.0 ** 1 -> 1.0 (float ** int -> float) + // - D: 1 ** 1.0 -> 1.0 (int ** float -> float) + // - E: -1 ** 0.5 ~> 1j (int ** float -> complex) + // - F: 1.0 ** 1.0 -> 1.0 (float ** float -> float) + // - G: -1.0 ** 0.5 ~> 1j (float ** float -> complex) + if (rhs_float) { + // Case D, E, F, or G... can't know without the sign of the LHS + // or whether the RHS is whole, which isn't worth the effort: + res = sym_new_unknown(ctx); + } + else { + if (lhs_float) { + // Case C: + res = sym_new_type(ctx, &PyFloat_Type); + } + else { + if (!sym_is_const(right)) { + // Case A or B... can't know without the sign of the RHS: + res = sym_new_unknown(ctx); + } + else { + if (_PyLong_IsNegative((PyLongObject *)sym_get_const(right))) { + // Case B: + res = sym_new_type(ctx, &PyFloat_Type); + } + else { + // Case A: + res = sym_new_type(ctx, &PyLong_Type); + } + } + } + } } else { - /* For any other op combining ints/floats the result is a float */ - res = sym_new_type(ctx, &PyFloat_Type); + if (oparg == NB_TRUE_DIVIDE || oparg == NB_INPLACE_TRUE_DIVIDE) { + res = sym_new_type(ctx, &PyFloat_Type); + } + else { + if (lhs_int && rhs_int) { + res = sym_new_type(ctx, &PyLong_Type); + } + else { + res = sym_new_type(ctx, &PyFloat_Type); + } + } } } - else { - res = sym_new_unknown(ctx); - } stack_pointer[-2] = res; stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); @@ -2326,16 +2396,14 @@ } case _SWAP: { - _Py_UopsSymbol *top_in; - _Py_UopsSymbol *bottom_in; - _Py_UopsSymbol *top_out; - _Py_UopsSymbol *bottom_out; - top_in = stack_pointer[-1]; - bottom_in = stack_pointer[-2 - (oparg-2)]; - bottom_out = bottom_in; - top_out = top_in; - stack_pointer[-2 - (oparg-2)] = top_out; - stack_pointer[-1] = bottom_out; + JitOptSymbol **top; + JitOptSymbol **bottom; + top = &stack_pointer[-1]; + bottom = &stack_pointer[-2 - (oparg-2)]; + JitOptSymbol *temp = bottom[0]; + bottom[0] = top[0]; + top[0] = temp; + assert(oparg >= 2); break; } @@ -2358,7 +2426,7 @@ /* _INSTRUMENTED_POP_JUMP_IF_NOT_NONE is not a viable micro-op for tier 2 */ case _GUARD_IS_TRUE_POP: { - _Py_UopsSymbol *flag; + JitOptSymbol *flag; flag = stack_pointer[-1]; if (sym_is_const(flag)) { PyObject *value = sym_get_const(flag); @@ -2375,7 +2443,7 @@ } case _GUARD_IS_FALSE_POP: { - _Py_UopsSymbol *flag; + JitOptSymbol *flag; flag = stack_pointer[-1]; if (sym_is_const(flag)) { PyObject *value = sym_get_const(flag); @@ -2392,7 +2460,7 @@ } case _GUARD_IS_NONE_POP: { - _Py_UopsSymbol *flag; + JitOptSymbol *flag; flag = stack_pointer[-1]; if (sym_is_const(flag)) { PyObject *value = sym_get_const(flag); @@ -2417,7 +2485,7 @@ } case _GUARD_IS_NOT_NONE_POP: { - _Py_UopsSymbol *flag; + JitOptSymbol *flag; flag = stack_pointer[-1]; if (sym_is_const(flag)) { PyObject *value = sym_get_const(flag); @@ -2475,7 +2543,7 @@ } case _LOAD_CONST_INLINE: { - _Py_UopsSymbol *value; + JitOptSymbol *value; PyObject *ptr = (PyObject *)this_instr->operand0; value = sym_new_const(ctx, ptr); stack_pointer[0] = value; @@ -2485,7 +2553,7 @@ } case _LOAD_CONST_INLINE_BORROW: { - _Py_UopsSymbol *value; + JitOptSymbol *value; PyObject *ptr = (PyObject *)this_instr->operand0; value = sym_new_const(ctx, ptr); stack_pointer[0] = value; @@ -2495,81 +2563,38 @@ } case _POP_TOP_LOAD_CONST_INLINE_BORROW: { - _Py_UopsSymbol *value; + JitOptSymbol *value; value = 
sym_new_not_null(ctx); stack_pointer[-1] = value; break; } - case _LOAD_CONST_INLINE_WITH_NULL: { - _Py_UopsSymbol *value; - _Py_UopsSymbol *null; - PyObject *ptr = (PyObject *)this_instr->operand0; - value = sym_new_const(ctx, ptr); - null = sym_new_null(ctx); - stack_pointer[0] = value; - stack_pointer[1] = null; - stack_pointer += 2; - assert(WITHIN_STACK_BOUNDS()); - break; - } - - case _LOAD_CONST_INLINE_BORROW_WITH_NULL: { - _Py_UopsSymbol *value; - _Py_UopsSymbol *null; - PyObject *ptr = (PyObject *)this_instr->operand0; - value = sym_new_const(ctx, ptr); - null = sym_new_null(ctx); - stack_pointer[0] = value; - stack_pointer[1] = null; - stack_pointer += 2; - assert(WITHIN_STACK_BOUNDS()); - break; - } - case _CHECK_FUNCTION: { break; } case _LOAD_GLOBAL_MODULE: { - _Py_UopsSymbol *res; - _Py_UopsSymbol *null = NULL; + JitOptSymbol *res; res = sym_new_not_null(ctx); - null = sym_new_null(ctx); stack_pointer[0] = res; - if (oparg & 1) stack_pointer[1] = null; - stack_pointer += 1 + (oparg & 1); + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); break; } case _LOAD_GLOBAL_BUILTINS: { - _Py_UopsSymbol *res; - _Py_UopsSymbol *null = NULL; + JitOptSymbol *res; res = sym_new_not_null(ctx); - null = sym_new_null(ctx); stack_pointer[0] = res; - if (oparg & 1) stack_pointer[1] = null; - stack_pointer += 1 + (oparg & 1); + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); break; } case _LOAD_ATTR_MODULE: { - _Py_UopsSymbol *attr; - _Py_UopsSymbol *null = NULL; + JitOptSymbol *attr; attr = sym_new_not_null(ctx); - null = sym_new_null(ctx); stack_pointer[-1] = attr; - if (oparg & 1) stack_pointer[0] = null; - stack_pointer += (oparg & 1); - assert(WITHIN_STACK_BOUNDS()); - break; - } - - case _INTERNAL_INCREMENT_OPT_COUNTER: { - stack_pointer += -1; - assert(WITHIN_STACK_BOUNDS()); break; } @@ -2598,8 +2623,6 @@ } case _ERROR_POP_N: { - stack_pointer += -oparg; - assert(WITHIN_STACK_BOUNDS()); break; } diff --git a/Python/optimizer_symbols.c b/Python/optimizer_symbols.c index 40cbf95e3d6d39..dcde8e7ce81577 100644 --- a/Python/optimizer_symbols.c +++ b/Python/optimizer_symbols.c @@ -28,11 +28,6 @@ - Bottom: IS_NULL and NOT_NULL flags set, type and const_val NULL. */ -// Flags for below. -#define IS_NULL 1 << 0 -#define NOT_NULL 1 << 1 -#define NO_SPACE 1 << 2 - #ifdef Py_DEBUG static inline int get_lltrace(void) { char *uop_debug = Py_GETENV("PYTHON_OPT_DEBUG"); @@ -48,187 +43,254 @@ static inline int get_lltrace(void) { #define DPRINTF(level, ...) 
#endif -static _Py_UopsSymbol NO_SPACE_SYMBOL = { - .flags = IS_NULL | NOT_NULL | NO_SPACE, - .typ = NULL, - .const_val = NULL, - .type_version = 0, + +static JitOptSymbol NO_SPACE_SYMBOL = { + .tag = JIT_SYM_BOTTOM_TAG }; -_Py_UopsSymbol * -out_of_space(_Py_UOpsContext *ctx) +JitOptSymbol * +out_of_space(JitOptContext *ctx) { ctx->done = true; ctx->out_of_space = true; return &NO_SPACE_SYMBOL; } -static _Py_UopsSymbol * -sym_new(_Py_UOpsContext *ctx) +static JitOptSymbol * +sym_new(JitOptContext *ctx) { - _Py_UopsSymbol *self = &ctx->t_arena.arena[ctx->t_arena.ty_curr_number]; + JitOptSymbol *self = &ctx->t_arena.arena[ctx->t_arena.ty_curr_number]; if (ctx->t_arena.ty_curr_number >= ctx->t_arena.ty_max_number) { OPT_STAT_INC(optimizer_failure_reason_no_memory); DPRINTF(1, "out of space for symbolic expression type\n"); return NULL; } ctx->t_arena.ty_curr_number++; - self->flags = 0; - self->typ = NULL; - self->const_val = NULL; - self->type_version = 0; - + self->tag = JIT_SYM_UNKNOWN_TAG; return self; } static inline void -sym_set_flag(_Py_UopsSymbol *sym, int flag) -{ - sym->flags |= flag; -} - -static inline void -sym_set_bottom(_Py_UOpsContext *ctx, _Py_UopsSymbol *sym) +sym_set_bottom(JitOptContext *ctx, JitOptSymbol *sym) { - sym_set_flag(sym, IS_NULL | NOT_NULL); - sym->typ = NULL; - Py_CLEAR(sym->const_val); + sym->tag = JIT_SYM_BOTTOM_TAG; ctx->done = true; ctx->contradiction = true; } bool -_Py_uop_sym_is_bottom(_Py_UopsSymbol *sym) +_Py_uop_sym_is_bottom(JitOptSymbol *sym) { - if ((sym->flags & IS_NULL) && (sym->flags & NOT_NULL)) { - assert(sym->flags == (IS_NULL | NOT_NULL)); - assert(sym->typ == NULL); - assert(sym->const_val == NULL); - return true; - } - return false; + return sym->tag == JIT_SYM_BOTTOM_TAG; } bool -_Py_uop_sym_is_not_null(_Py_UopsSymbol *sym) -{ - return sym->flags == NOT_NULL; +_Py_uop_sym_is_not_null(JitOptSymbol *sym) { + return sym->tag == JIT_SYM_NON_NULL_TAG || sym->tag > JIT_SYM_BOTTOM_TAG; } bool -_Py_uop_sym_is_null(_Py_UopsSymbol *sym) +_Py_uop_sym_is_const(JitOptSymbol *sym) { - return sym->flags == IS_NULL; + return sym->tag == JIT_SYM_KNOWN_VALUE_TAG; } bool -_Py_uop_sym_is_const(_Py_UopsSymbol *sym) +_Py_uop_sym_is_null(JitOptSymbol *sym) { - return sym->const_val != NULL; + return sym->tag == JIT_SYM_NULL_TAG; } + PyObject * -_Py_uop_sym_get_const(_Py_UopsSymbol *sym) +_Py_uop_sym_get_const(JitOptSymbol *sym) { - return sym->const_val; + if (sym->tag == JIT_SYM_KNOWN_VALUE_TAG) { + return sym->value.value; + } + return NULL; } void -_Py_uop_sym_set_type(_Py_UOpsContext *ctx, _Py_UopsSymbol *sym, PyTypeObject *typ) +_Py_uop_sym_set_type(JitOptContext *ctx, JitOptSymbol *sym, PyTypeObject *typ) { - assert(typ != NULL && PyType_Check(typ)); - if (sym->flags & IS_NULL) { - sym_set_bottom(ctx, sym); - return; - } - if (sym->typ != NULL) { - if (sym->typ != typ) { + JitSymType tag = sym->tag; + switch(tag) { + case JIT_SYM_NULL_TAG: sym_set_bottom(ctx, sym); return; - } - } - else { - sym_set_flag(sym, NOT_NULL); - sym->typ = typ; + case JIT_SYM_KNOWN_CLASS_TAG: + if (sym->cls.type != typ) { + sym_set_bottom(ctx, sym); + } + return; + case JIT_SYM_TYPE_VERSION_TAG: + if (sym->version.version == typ->tp_version_tag) { + sym->tag = JIT_SYM_KNOWN_CLASS_TAG; + sym->cls.type = typ; + sym->cls.version = typ->tp_version_tag; + } + else { + sym_set_bottom(ctx, sym); + } + return; + case JIT_SYM_KNOWN_VALUE_TAG: + if (Py_TYPE(sym->value.value) != typ) { + Py_CLEAR(sym->value.value); + sym_set_bottom(ctx, sym); + } + return; + case JIT_SYM_TUPLE_TAG: + if 
(typ != &PyTuple_Type) { + sym_set_bottom(ctx, sym); + } + return; + case JIT_SYM_BOTTOM_TAG: + return; + case JIT_SYM_NON_NULL_TAG: + case JIT_SYM_UNKNOWN_TAG: + sym->tag = JIT_SYM_KNOWN_CLASS_TAG; + sym->cls.version = 0; + sym->cls.type = typ; + return; } } bool -_Py_uop_sym_set_type_version(_Py_UOpsContext *ctx, _Py_UopsSymbol *sym, unsigned int version) +_Py_uop_sym_set_type_version(JitOptContext *ctx, JitOptSymbol *sym, unsigned int version) { - // if the type version was already set, then it must be different and we should set it to bottom - if (sym->type_version) { - sym_set_bottom(ctx, sym); - return false; + JitSymType tag = sym->tag; + switch(tag) { + case JIT_SYM_NULL_TAG: + sym_set_bottom(ctx, sym); + return false; + case JIT_SYM_KNOWN_CLASS_TAG: + if (sym->cls.type->tp_version_tag != version) { + sym_set_bottom(ctx, sym); + return false; + } + else { + sym->cls.version = version; + return true; + } + case JIT_SYM_KNOWN_VALUE_TAG: + Py_CLEAR(sym->value.value); + sym_set_bottom(ctx, sym); + return false; + case JIT_SYM_TUPLE_TAG: + sym_set_bottom(ctx, sym); + return false; + case JIT_SYM_TYPE_VERSION_TAG: + if (sym->version.version == version) { + return true; + } + sym_set_bottom(ctx, sym); + return false; + case JIT_SYM_BOTTOM_TAG: + return false; + case JIT_SYM_NON_NULL_TAG: + case JIT_SYM_UNKNOWN_TAG: + sym->tag = JIT_SYM_TYPE_VERSION_TAG; + sym->version.version = version; + return true; } - sym->type_version = version; - return true; + Py_UNREACHABLE(); +} + +static void make_const(JitOptSymbol *sym, PyObject *val) +{ + sym->tag = JIT_SYM_KNOWN_VALUE_TAG; + sym->value.value = Py_NewRef(val); } void -_Py_uop_sym_set_const(_Py_UOpsContext *ctx, _Py_UopsSymbol *sym, PyObject *const_val) +_Py_uop_sym_set_const(JitOptContext *ctx, JitOptSymbol *sym, PyObject *const_val) { - assert(const_val != NULL); - if (sym->flags & IS_NULL) { - sym_set_bottom(ctx, sym); - } - PyTypeObject *typ = Py_TYPE(const_val); - if (sym->typ != NULL && sym->typ != typ) { - sym_set_bottom(ctx, sym); - } - if (sym->const_val != NULL) { - if (sym->const_val != const_val) { - // TODO: What if they're equal? 
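To make the refinement rules in these hunks concrete: the old flag/typ/const_val fields become a small tagged union, and every "set" operation either sharpens the tag or collapses the symbol to bottom on a contradiction. A minimal standalone C sketch of that idea, assuming illustrative names only (the real JitOptSymbol layout and JIT_SYM_* tags live in the JIT headers, which this diff does not show):

    #include <stdbool.h>
    #include <stdio.h>

    /* Illustrative tags, loosely modelled on JIT_SYM_*_TAG; not the real enum. */
    typedef enum { SYM_UNKNOWN, SYM_NON_NULL, SYM_KNOWN_TYPE, SYM_KNOWN_VALUE, SYM_BOTTOM } toy_tag;
    typedef enum { TYPE_NONE, TYPE_INT, TYPE_FLOAT } toy_type;

    typedef struct {
        toy_tag tag;
        toy_type type;   /* valid for SYM_KNOWN_TYPE / SYM_KNOWN_VALUE */
        long value;      /* valid for SYM_KNOWN_VALUE only */
    } toy_sym;

    /* Refine a symbol with a concrete value; contradictions collapse to bottom. */
    static void toy_set_const(toy_sym *s, toy_type type, long value)
    {
        switch (s->tag) {
        case SYM_UNKNOWN:
        case SYM_NON_NULL:
            s->tag = SYM_KNOWN_VALUE; s->type = type; s->value = value;
            return;
        case SYM_KNOWN_TYPE:
            if (s->type != type) { s->tag = SYM_BOTTOM; return; }
            s->tag = SYM_KNOWN_VALUE; s->value = value;
            return;
        case SYM_KNOWN_VALUE:
            if (s->type != type || s->value != value) { s->tag = SYM_BOTTOM; }
            return;
        case SYM_BOTTOM:
            return;   /* already contradictory; stays bottom */
        }
    }

    int main(void)
    {
        toy_sym s = { .tag = SYM_KNOWN_TYPE, .type = TYPE_INT };
        toy_set_const(&s, TYPE_INT, 42);    /* int refined to the constant 42 */
        toy_set_const(&s, TYPE_FLOAT, 1);   /* contradiction -> bottom */
        printf("tag=%d\n", s.tag);          /* prints tag=4 (SYM_BOTTOM) */
        return 0;
    }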
+ JitSymType tag = sym->tag; + switch(tag) { + case JIT_SYM_NULL_TAG: sym_set_bottom(ctx, sym); - } - } - else { - sym_set_flag(sym, NOT_NULL); - sym->typ = typ; - sym->const_val = Py_NewRef(const_val); + return; + case JIT_SYM_KNOWN_CLASS_TAG: + if (sym->cls.type != Py_TYPE(const_val)) { + sym_set_bottom(ctx, sym); + return; + } + make_const(sym, const_val); + return; + case JIT_SYM_KNOWN_VALUE_TAG: + if (sym->value.value != const_val) { + Py_CLEAR(sym->value.value); + sym_set_bottom(ctx, sym); + } + return; + case JIT_SYM_TUPLE_TAG: + sym_set_bottom(ctx, sym); + return; + case JIT_SYM_TYPE_VERSION_TAG: + if (sym->version.version != Py_TYPE(const_val)->tp_version_tag) { + sym_set_bottom(ctx, sym); + return; + } + make_const(sym, const_val); + return; + case JIT_SYM_BOTTOM_TAG: + return; + case JIT_SYM_NON_NULL_TAG: + case JIT_SYM_UNKNOWN_TAG: + make_const(sym, const_val); + return; } } void -_Py_uop_sym_set_null(_Py_UOpsContext *ctx, _Py_UopsSymbol *sym) +_Py_uop_sym_set_null(JitOptContext *ctx, JitOptSymbol *sym) { - if (_Py_uop_sym_is_not_null(sym)) { + if (sym->tag == JIT_SYM_UNKNOWN_TAG) { + sym->tag = JIT_SYM_NULL_TAG; + } + else if (sym->tag > JIT_SYM_NULL_TAG) { sym_set_bottom(ctx, sym); } - sym_set_flag(sym, IS_NULL); } void -_Py_uop_sym_set_non_null(_Py_UOpsContext *ctx, _Py_UopsSymbol *sym) +_Py_uop_sym_set_non_null(JitOptContext *ctx, JitOptSymbol *sym) { - if (_Py_uop_sym_is_null(sym)) { + if (sym->tag == JIT_SYM_UNKNOWN_TAG) { + sym->tag = JIT_SYM_NON_NULL_TAG; + } + else if (sym->tag == JIT_SYM_NULL_TAG) { sym_set_bottom(ctx, sym); } - sym_set_flag(sym, NOT_NULL); } -_Py_UopsSymbol * -_Py_uop_sym_new_unknown(_Py_UOpsContext *ctx) +JitOptSymbol * +_Py_uop_sym_new_unknown(JitOptContext *ctx) { - return sym_new(ctx); + JitOptSymbol *res = sym_new(ctx); + if (res == NULL) { + return out_of_space(ctx); + } + return res; } -_Py_UopsSymbol * -_Py_uop_sym_new_not_null(_Py_UOpsContext *ctx) +JitOptSymbol * +_Py_uop_sym_new_not_null(JitOptContext *ctx) { - _Py_UopsSymbol *res = _Py_uop_sym_new_unknown(ctx); + JitOptSymbol *res = sym_new(ctx); if (res == NULL) { return out_of_space(ctx); } - sym_set_flag(res, NOT_NULL); + res->tag = JIT_SYM_NON_NULL_TAG; return res; } -_Py_UopsSymbol * -_Py_uop_sym_new_type(_Py_UOpsContext *ctx, PyTypeObject *typ) +JitOptSymbol * +_Py_uop_sym_new_type(JitOptContext *ctx, PyTypeObject *typ) { - _Py_UopsSymbol *res = sym_new(ctx); + JitOptSymbol *res = sym_new(ctx); if (res == NULL) { return out_of_space(ctx); } @@ -237,11 +299,11 @@ _Py_uop_sym_new_type(_Py_UOpsContext *ctx, PyTypeObject *typ) } // Adds a new reference to const_val, owned by the symbol. 
-_Py_UopsSymbol * -_Py_uop_sym_new_const(_Py_UOpsContext *ctx, PyObject *const_val) +JitOptSymbol * +_Py_uop_sym_new_const(JitOptContext *ctx, PyObject *const_val) { assert(const_val != NULL); - _Py_UopsSymbol *res = sym_new(ctx); + JitOptSymbol *res = sym_new(ctx); if (res == NULL) { return out_of_space(ctx); } @@ -249,10 +311,10 @@ _Py_uop_sym_new_const(_Py_UOpsContext *ctx, PyObject *const_val) return res; } -_Py_UopsSymbol * -_Py_uop_sym_new_null(_Py_UOpsContext *ctx) +JitOptSymbol * +_Py_uop_sym_new_null(JitOptContext *ctx) { - _Py_UopsSymbol *null_sym = _Py_uop_sym_new_unknown(ctx); + JitOptSymbol *null_sym = sym_new(ctx); if (null_sym == NULL) { return out_of_space(ctx); } @@ -261,64 +323,105 @@ _Py_uop_sym_new_null(_Py_UOpsContext *ctx) } PyTypeObject * -_Py_uop_sym_get_type(_Py_UopsSymbol *sym) +_Py_uop_sym_get_type(JitOptSymbol *sym) { - if (_Py_uop_sym_is_bottom(sym)) { - return NULL; - } - return sym->typ; + JitSymType tag = sym->tag; + switch(tag) { + case JIT_SYM_NULL_TAG: + case JIT_SYM_TYPE_VERSION_TAG: + case JIT_SYM_BOTTOM_TAG: + case JIT_SYM_NON_NULL_TAG: + case JIT_SYM_UNKNOWN_TAG: + return NULL; + case JIT_SYM_KNOWN_CLASS_TAG: + return sym->cls.type; + case JIT_SYM_KNOWN_VALUE_TAG: + return Py_TYPE(sym->value.value); + case JIT_SYM_TUPLE_TAG: + return &PyTuple_Type; + } + Py_UNREACHABLE(); } unsigned int -_Py_uop_sym_get_type_version(_Py_UopsSymbol *sym) +_Py_uop_sym_get_type_version(JitOptSymbol *sym) { - return sym->type_version; + JitSymType tag = sym->tag; + switch(tag) { + case JIT_SYM_NULL_TAG: + case JIT_SYM_BOTTOM_TAG: + case JIT_SYM_NON_NULL_TAG: + case JIT_SYM_UNKNOWN_TAG: + return 0; + case JIT_SYM_TYPE_VERSION_TAG: + return sym->version.version; + case JIT_SYM_KNOWN_CLASS_TAG: + return sym->cls.version; + case JIT_SYM_KNOWN_VALUE_TAG: + return Py_TYPE(sym->value.value)->tp_version_tag; + case JIT_SYM_TUPLE_TAG: + return PyTuple_Type.tp_version_tag; + } + Py_UNREACHABLE(); } bool -_Py_uop_sym_has_type(_Py_UopsSymbol *sym) +_Py_uop_sym_has_type(JitOptSymbol *sym) { - if (_Py_uop_sym_is_bottom(sym)) { - return false; - } - return sym->typ != NULL; + JitSymType tag = sym->tag; + switch(tag) { + case JIT_SYM_NULL_TAG: + case JIT_SYM_TYPE_VERSION_TAG: + case JIT_SYM_BOTTOM_TAG: + case JIT_SYM_NON_NULL_TAG: + case JIT_SYM_UNKNOWN_TAG: + return false; + case JIT_SYM_KNOWN_CLASS_TAG: + case JIT_SYM_KNOWN_VALUE_TAG: + case JIT_SYM_TUPLE_TAG: + return true; + } + Py_UNREACHABLE(); } bool -_Py_uop_sym_matches_type(_Py_UopsSymbol *sym, PyTypeObject *typ) +_Py_uop_sym_matches_type(JitOptSymbol *sym, PyTypeObject *typ) { assert(typ != NULL && PyType_Check(typ)); return _Py_uop_sym_get_type(sym) == typ; } bool -_Py_uop_sym_matches_type_version(_Py_UopsSymbol *sym, unsigned int version) +_Py_uop_sym_matches_type_version(JitOptSymbol *sym, unsigned int version) { return _Py_uop_sym_get_type_version(sym) == version; } - int -_Py_uop_sym_truthiness(_Py_UopsSymbol *sym) -{ - /* There are some non-constant values for - * which `bool(val)` always evaluates to - * True or False, such as tuples with known - * length, but unknown contents, or bound-methods. - * This function will need updating - * should we support those values. 
- */ - if (_Py_uop_sym_is_bottom(sym)) { - return -1; - } - if (!_Py_uop_sym_is_const(sym)) { - return -1; - } - PyObject *value = _Py_uop_sym_get_const(sym); +_Py_uop_sym_truthiness(JitOptSymbol *sym) +{ + switch(sym->tag) { + case JIT_SYM_NULL_TAG: + case JIT_SYM_TYPE_VERSION_TAG: + case JIT_SYM_BOTTOM_TAG: + case JIT_SYM_NON_NULL_TAG: + case JIT_SYM_UNKNOWN_TAG: + return -1; + case JIT_SYM_KNOWN_CLASS_TAG: + /* TODO : + * Instances of some classes are always + * true. We should return 1 in those cases */ + return -1; + case JIT_SYM_KNOWN_VALUE_TAG: + break; + case JIT_SYM_TUPLE_TAG: + return sym->tuple.length != 0; + } + PyObject *value = sym->value.value; + /* Only handle a few known safe types */ if (value == Py_None) { return 0; } - /* Only handle a few known safe types */ PyTypeObject *tp = Py_TYPE(value); if (tp == &PyLong_Type) { return !_PyLong_IsZero((PyLongObject *)value); @@ -332,13 +435,84 @@ _Py_uop_sym_truthiness(_Py_UopsSymbol *sym) return -1; } +static JitOptSymbol * +allocation_base(JitOptContext *ctx) +{ + return ctx->t_arena.arena; +} + +JitOptSymbol * +_Py_uop_sym_new_tuple(JitOptContext *ctx, int size, JitOptSymbol **args) +{ + JitOptSymbol *res = sym_new(ctx); + if (res == NULL) { + return out_of_space(ctx); + } + if (size > MAX_SYMBOLIC_TUPLE_SIZE) { + res->tag = JIT_SYM_KNOWN_CLASS_TAG; + res->cls.type = &PyTuple_Type; + } + else { + res->tag = JIT_SYM_TUPLE_TAG; + res->tuple.length = size; + for (int i = 0; i < size; i++) { + res->tuple.items[i] = (uint16_t)(args[i] - allocation_base(ctx)); + } + } + return res; +} + +JitOptSymbol * +_Py_uop_sym_tuple_getitem(JitOptContext *ctx, JitOptSymbol *sym, int item) +{ + assert(item >= 0); + if (sym->tag == JIT_SYM_KNOWN_VALUE_TAG) { + PyObject *tuple = sym->value.value; + if (PyTuple_CheckExact(tuple) && item < PyTuple_GET_SIZE(tuple)) { + return _Py_uop_sym_new_const(ctx, PyTuple_GET_ITEM(tuple, item)); + } + } + else if (sym->tag == JIT_SYM_TUPLE_TAG && item < sym->tuple.length) { + return allocation_base(ctx) + sym->tuple.items[item]; + } + return _Py_uop_sym_new_unknown(ctx); +} + +int +_Py_uop_sym_tuple_length(JitOptSymbol *sym) +{ + if (sym->tag == JIT_SYM_KNOWN_VALUE_TAG) { + PyObject *tuple = sym->value.value; + if (PyTuple_CheckExact(tuple)) { + return PyTuple_GET_SIZE(tuple); + } + } + else if (sym->tag == JIT_SYM_TUPLE_TAG) { + return sym->tuple.length; + } + return -1; +} + +// Return true if known to be immortal. +bool +_Py_uop_sym_is_immortal(JitOptSymbol *sym) +{ + if (sym->tag == JIT_SYM_KNOWN_VALUE_TAG) { + return _Py_IsImmortal(sym->value.value); + } + if (sym->tag == JIT_SYM_KNOWN_CLASS_TAG) { + return sym->cls.type == &PyBool_Type; + } + return false; +} + // 0 on success, -1 on error. 
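The _Py_uop_sym_new_tuple / _Py_uop_sym_tuple_getitem hunks above record tuple elements as 16-bit indices relative to the arena base rather than as pointers, which keeps a symbol small enough to fit the size budget asserted later. A standalone model of that trick, with made-up names and sizes rather than the real CPython structures:

    #include <stdint.h>
    #include <stdio.h>

    #define ARENA_SIZE 512
    #define MAX_TUPLE  7          /* illustrative cap, in the spirit of MAX_SYMBOLIC_TUPLE_SIZE */

    typedef struct {
        int tag;                  /* 0 = unknown, 1 = tuple (illustrative) */
        uint16_t length;
        uint16_t items[MAX_TUPLE];   /* offsets into the arena, not pointers */
    } toy_sym;

    static toy_sym arena[ARENA_SIZE];
    static int arena_used;

    static toy_sym *toy_new(void) { return &arena[arena_used++]; }

    static toy_sym *toy_new_tuple(toy_sym **args, int size)
    {
        toy_sym *t = toy_new();
        t->tag = 1;
        t->length = (uint16_t)size;
        for (int i = 0; i < size; i++) {
            /* store each element's index relative to the arena base */
            t->items[i] = (uint16_t)(args[i] - arena);
        }
        return t;
    }

    static toy_sym *toy_tuple_getitem(toy_sym *t, int i)
    {
        return &arena[t->items[i]];   /* resolve the offset back to a pointer */
    }

    int main(void)
    {
        toy_sym *a = toy_new(), *b = toy_new();
        toy_sym *items[2] = { a, b };
        toy_sym *t = toy_new_tuple(items, 2);
        printf("%d\n", toy_tuple_getitem(t, 1) == b);   /* prints 1 */
        return 0;
    }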
_Py_UOpsAbstractFrame * _Py_uop_frame_new( - _Py_UOpsContext *ctx, + JitOptContext *ctx, PyCodeObject *co, int curr_stackentries, - _Py_UopsSymbol **args, + JitOptSymbol **args, int arg_len) { assert(ctx->curr_frame_depth < MAX_ABSTRACT_FRAME_DEPTH); @@ -363,14 +537,14 @@ _Py_uop_frame_new( } for (int i = arg_len; i < co->co_nlocalsplus; i++) { - _Py_UopsSymbol *local = _Py_uop_sym_new_unknown(ctx); + JitOptSymbol *local = _Py_uop_sym_new_unknown(ctx); frame->locals[i] = local; } // Initialize the stack as well for (int i = 0; i < curr_stackentries; i++) { - _Py_UopsSymbol *stackvar = _Py_uop_sym_new_unknown(ctx); + JitOptSymbol *stackvar = _Py_uop_sym_new_unknown(ctx); frame->stack[i] = stackvar; } @@ -378,7 +552,7 @@ _Py_uop_frame_new( } void -_Py_uop_abstractcontext_fini(_Py_UOpsContext *ctx) +_Py_uop_abstractcontext_fini(JitOptContext *ctx) { if (ctx == NULL) { return; @@ -386,13 +560,17 @@ _Py_uop_abstractcontext_fini(_Py_UOpsContext *ctx) ctx->curr_frame_depth = 0; int tys = ctx->t_arena.ty_curr_number; for (int i = 0; i < tys; i++) { - Py_CLEAR(ctx->t_arena.arena[i].const_val); + JitOptSymbol *sym = &ctx->t_arena.arena[i]; + if (sym->tag == JIT_SYM_KNOWN_VALUE_TAG) { + Py_CLEAR(sym->value.value); + } } } void -_Py_uop_abstractcontext_init(_Py_UOpsContext *ctx) +_Py_uop_abstractcontext_init(JitOptContext *ctx) { + static_assert(sizeof(JitOptSymbol) <= 2*sizeof(uint64_t)); ctx->limit = ctx->locals_and_stack + MAX_ABSTRACT_INTERP_SIZE; ctx->n_consumed = ctx->locals_and_stack; #ifdef Py_DEBUG // Aids debugging a little. There should never be NULL in the abstract interpreter. @@ -410,7 +588,7 @@ _Py_uop_abstractcontext_init(_Py_UOpsContext *ctx) } int -_Py_uop_frame_pop(_Py_UOpsContext *ctx) +_Py_uop_frame_pop(JitOptContext *ctx) { _Py_UOpsAbstractFrame *frame = ctx->frame; ctx->n_consumed = frame->locals; @@ -431,26 +609,25 @@ do { \ } \ } while (0) -static _Py_UopsSymbol * -make_bottom(_Py_UOpsContext *ctx) +static JitOptSymbol * +make_bottom(JitOptContext *ctx) { - _Py_UopsSymbol *sym = _Py_uop_sym_new_unknown(ctx); - _Py_uop_sym_set_null(ctx, sym); - _Py_uop_sym_set_non_null(ctx, sym); + JitOptSymbol *sym = sym_new(ctx); + sym->tag = JIT_SYM_BOTTOM_TAG; return sym; } PyObject * _Py_uop_symbols_test(PyObject *Py_UNUSED(self), PyObject *Py_UNUSED(ignored)) { - _Py_UOpsContext context; - _Py_UOpsContext *ctx = &context; + JitOptContext context; + JitOptContext *ctx = &context; _Py_uop_abstractcontext_init(ctx); PyObject *val_42 = NULL; PyObject *val_43 = NULL; // Use a single 'sym' variable so copy-pasting tests is easier. 
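Two related details from the hunks above, shown as a standalone sketch under illustrative types: the symbol is kept to two machine words (the static_assert in _Py_uop_abstractcontext_init), and teardown only releases references for the one tag that owns them, as _Py_uop_abstractcontext_fini now does for JIT_SYM_KNOWN_VALUE_TAG:

    #include <assert.h>
    #include <stdint.h>
    #include <stdlib.h>

    enum { TAG_UNKNOWN, TAG_KNOWN_VALUE };   /* illustrative, one owning tag */

    typedef struct {
        uint8_t tag;
        union {
            char *owned;        /* strong reference, TAG_KNOWN_VALUE only */
            uint32_t version;   /* plain data for the other tags */
        } u;
    } toy_sym;

    /* Mirrors the size constraint asserted in the context-init hunk above. */
    static_assert(sizeof(toy_sym) <= 2 * sizeof(uint64_t), "symbol grew too large");

    static void toy_arena_fini(toy_sym *arena, int used)
    {
        for (int i = 0; i < used; i++) {
            if (arena[i].tag == TAG_KNOWN_VALUE) {
                free(arena[i].u.owned);      /* only the owning tag releases */
                arena[i].u.owned = NULL;
            }
        }
    }

    int main(void)
    {
        toy_sym arena[4] = {0};
        arena[0].tag = TAG_KNOWN_VALUE;
        arena[0].u.owned = malloc(16);
        toy_arena_fini(arena, 4);
        return 0;
    }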
- _Py_UopsSymbol *sym = _Py_uop_sym_new_unknown(ctx); + JitOptSymbol *sym = _Py_uop_sym_new_unknown(ctx); if (sym == NULL) { goto fail; } @@ -510,6 +687,7 @@ _Py_uop_symbols_test(PyObject *Py_UNUSED(self), PyObject *Py_UNUSED(ignored)) TEST_PREDICATE(_Py_uop_sym_is_const(sym), "42 is not a constant"); TEST_PREDICATE(_Py_uop_sym_get_const(sym) != NULL, "42 as constant is NULL"); TEST_PREDICATE(_Py_uop_sym_get_const(sym) == val_42, "42 as constant isn't 42"); + TEST_PREDICATE(_Py_uop_sym_is_immortal(sym), "42 is not immortal"); _Py_uop_sym_set_type(ctx, sym, &PyLong_Type); // Should be a no-op TEST_PREDICATE(_Py_uop_sym_matches_type(sym, &PyLong_Type), "(42 and 42) isn't an int"); @@ -518,6 +696,9 @@ _Py_uop_symbols_test(PyObject *Py_UNUSED(self), PyObject *Py_UNUSED(ignored)) _Py_uop_sym_set_type(ctx, sym, &PyFloat_Type); // Should make it bottom TEST_PREDICATE(_Py_uop_sym_is_bottom(sym), "(42 and float) isn't bottom"); + sym = _Py_uop_sym_new_type(ctx, &PyBool_Type); + TEST_PREDICATE(_Py_uop_sym_is_immortal(sym), "a bool is not immortal"); + sym = _Py_uop_sym_new_type(ctx, &PyLong_Type); if (sym == NULL) { goto fail; @@ -534,15 +715,37 @@ _Py_uop_symbols_test(PyObject *Py_UNUSED(self), PyObject *Py_UNUSED(ignored)) sym = _Py_uop_sym_new_const(ctx, PyLong_FromLong(0)); TEST_PREDICATE(_Py_uop_sym_truthiness(sym) == 0, "bool(0) is not False"); + JitOptSymbol *i1 = _Py_uop_sym_new_type(ctx, &PyFloat_Type); + JitOptSymbol *i2 = _Py_uop_sym_new_const(ctx, val_43); + JitOptSymbol *array[2] = { i1, i2 }; + sym = _Py_uop_sym_new_tuple(ctx, 2, array); + TEST_PREDICATE( + _Py_uop_sym_matches_type(_Py_uop_sym_tuple_getitem(ctx, sym, 0), &PyFloat_Type), + "tuple item does not match value used to create tuple" + ); + TEST_PREDICATE( + _Py_uop_sym_get_const(_Py_uop_sym_tuple_getitem(ctx, sym, 1)) == val_43, + "tuple item does not match value used to create tuple" + ); + PyObject *pair[2] = { val_42, val_43 }; + PyObject *tuple = _PyTuple_FromArray(pair, 2); + sym = _Py_uop_sym_new_const(ctx, tuple); + TEST_PREDICATE( + _Py_uop_sym_get_const(_Py_uop_sym_tuple_getitem(ctx, sym, 1)) == val_43, + "tuple item does not match value used to create tuple" + ); + _Py_uop_abstractcontext_fini(ctx); Py_DECREF(val_42); Py_DECREF(val_43); + Py_DECREF(tuple); Py_RETURN_NONE; fail: _Py_uop_abstractcontext_fini(ctx); Py_XDECREF(val_42); Py_XDECREF(val_43); + Py_DECREF(tuple); return NULL; } diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c index 06418123d6dd9b..300a871d2cc4bf 100644 --- a/Python/pylifecycle.c +++ b/Python/pylifecycle.c @@ -46,8 +46,25 @@ #if defined(__APPLE__) # include +# include # include -# include +// The os_log unified logging APIs were introduced in macOS 10.12, iOS 10.0, +// tvOS 10.0, and watchOS 3.0; +# if defined(TARGET_OS_IPHONE) && TARGET_OS_IPHONE +# define HAS_APPLE_SYSTEM_LOG 1 +# elif defined(TARGET_OS_OSX) && TARGET_OS_OSX +# if defined(MAC_OS_X_VERSION_10_12) && MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_12 +# define HAS_APPLE_SYSTEM_LOG 1 +# else +# define HAS_APPLE_SYSTEM_LOG 0 +# endif +# else +# define HAS_APPLE_SYSTEM_LOG 0 +# endif + +# if HAS_APPLE_SYSTEM_LOG +# include +# endif #endif #ifdef HAVE_SIGNAL_H @@ -77,7 +94,7 @@ static PyStatus init_sys_streams(PyThreadState *tstate); #ifdef __ANDROID__ static PyStatus init_android_streams(PyThreadState *tstate); #endif -#if defined(__APPLE__) +#if defined(__APPLE__) && HAS_APPLE_SYSTEM_LOG static PyStatus init_apple_streams(PyThreadState *tstate); #endif static void wait_for_thread_shutdown(PyThreadState *tstate); @@ 
-94,23 +111,7 @@ static void call_ll_exitfuncs(_PyRuntimeState *runtime); _Py_COMP_DIAG_PUSH _Py_COMP_DIAG_IGNORE_DEPR_DECLS -#if defined(MS_WINDOWS) - -#pragma section("PyRuntime", read, write) -__declspec(allocate("PyRuntime")) - -#elif defined(__APPLE__) - -__attribute__(( - section(SEG_DATA ",PyRuntime") -)) - -#endif - -_PyRuntimeState _PyRuntime -#if defined(__linux__) && (defined(__GNUC__) || defined(__clang__)) -__attribute__ ((section (".PyRuntime"))) -#endif +GENERATE_DEBUG_SECTION(PyRuntime, _PyRuntimeState _PyRuntime) = _PyRuntimeState_INIT(_PyRuntime, _Py_Debug_Cookie); _Py_COMP_DIAG_POP @@ -427,40 +428,6 @@ interpreter_update_config(PyThreadState *tstate, int only_update_path_config) } -int -_PyInterpreterState_SetConfig(const PyConfig *src_config) -{ - PyThreadState *tstate = _PyThreadState_GET(); - int res = -1; - - PyConfig config; - PyConfig_InitPythonConfig(&config); - PyStatus status = _PyConfig_Copy(&config, src_config); - if (_PyStatus_EXCEPTION(status)) { - _PyErr_SetFromPyStatus(status); - goto done; - } - - status = _PyConfig_Read(&config, 1); - if (_PyStatus_EXCEPTION(status)) { - _PyErr_SetFromPyStatus(status); - goto done; - } - - status = _PyConfig_Copy(&tstate->interp->config, &config); - if (_PyStatus_EXCEPTION(status)) { - _PyErr_SetFromPyStatus(status); - goto done; - } - - res = interpreter_update_config(tstate, 0); - -done: - PyConfig_Clear(&config); - return res; -} - - /* Global initializations. Can be undone by Py_Finalize(). Don't call this twice without an intervening Py_Finalize() call. @@ -690,7 +657,12 @@ pycore_create_interpreter(_PyRuntimeState *runtime, // the settings are loaded (so that feature_flags are set) but before // any calls are made to obmalloc functions. if (_PyMem_init_obmalloc(interp) < 0) { - return _PyStatus_NO_MEMORY(); + return _PyStatus_NO_MEMORY(); + } + + status = _PyTraceMalloc_Init(); + if (_PyStatus_EXCEPTION(status)) { + return status; } PyThreadState *tstate = _PyThreadState_New(interp, @@ -1262,7 +1234,7 @@ init_interp_main(PyThreadState *tstate) return status; } #endif -#if defined(__APPLE__) +#if defined(__APPLE__) && HAS_APPLE_SYSTEM_LOG if (config->use_system_logger) { status = init_apple_streams(tstate); if (_PyStatus_EXCEPTION(status)) { @@ -1334,14 +1306,7 @@ init_interp_main(PyThreadState *tstate) } else #endif { - PyObject *opt = _PyOptimizer_NewUOpOptimizer(); - if (opt == NULL) { - return _PyStatus_ERR("can't initialize optimizer"); - } - if (_Py_SetTier2Optimizer((_PyOptimizerObject *)opt)) { - return _PyStatus_ERR("can't install optimizer"); - } - Py_DECREF(opt); + interp->jit = true; } } } @@ -1483,18 +1448,6 @@ Py_Initialize(void) } -PyStatus -_Py_InitializeMain(void) -{ - PyStatus status = _PyRuntime_Initialize(); - if (_PyStatus_EXCEPTION(status)) { - return status; - } - PyThreadState *tstate = _PyThreadState_GET(); - return pyinit_main(tstate); -} - - static void finalize_modules_delete_special(PyThreadState *tstate, int verbose) { @@ -1522,13 +1475,15 @@ finalize_modules_delete_special(PyThreadState *tstate, int verbose) PySys_WriteStderr("# clear builtins._\n"); } if (PyDict_SetItemString(interp->builtins, "_", Py_None) < 0) { - PyErr_FormatUnraisable("Exception ignored on setting builtin variable _"); + PyErr_FormatUnraisable("Exception ignored while " + "setting builtin variable _"); } const char * const *p; for (p = sys_deletes; *p != NULL; p++) { if (_PySys_ClearAttrString(interp, *p, verbose) < 0) { - PyErr_FormatUnraisable("Exception ignored on clearing sys.%s", *p); + 
PyErr_FormatUnraisable("Exception ignored while " + "clearing sys.%s", *p); } } for (p = sys_files; *p != NULL; p+=2) { @@ -1539,13 +1494,15 @@ finalize_modules_delete_special(PyThreadState *tstate, int verbose) } PyObject *value; if (PyDict_GetItemStringRef(interp->sysdict, orig_name, &value) < 0) { - PyErr_FormatUnraisable("Exception ignored on restoring sys.%s", name); + PyErr_FormatUnraisable("Exception ignored while " + "restoring sys.%s", name); } if (value == NULL) { value = Py_NewRef(Py_None); } if (PyDict_SetItemString(interp->sysdict, name, value) < 0) { - PyErr_FormatUnraisable("Exception ignored on restoring sys.%s", name); + PyErr_FormatUnraisable("Exception ignored while " + "restoring sys.%s", name); } Py_DECREF(value); } @@ -1557,7 +1514,7 @@ finalize_remove_modules(PyObject *modules, int verbose) { PyObject *weaklist = PyList_New(0); if (weaklist == NULL) { - PyErr_FormatUnraisable("Exception ignored on removing modules"); + PyErr_FormatUnraisable("Exception ignored while removing modules"); } #define STORE_MODULE_WEAKREF(name, mod) \ @@ -1566,13 +1523,13 @@ finalize_remove_modules(PyObject *modules, int verbose) if (wr) { \ PyObject *tup = PyTuple_Pack(2, name, wr); \ if (!tup || PyList_Append(weaklist, tup) < 0) { \ - PyErr_FormatUnraisable("Exception ignored on removing modules"); \ + PyErr_FormatUnraisable("Exception ignored while removing modules"); \ } \ Py_XDECREF(tup); \ Py_DECREF(wr); \ } \ else { \ - PyErr_FormatUnraisable("Exception ignored on removing modules"); \ + PyErr_FormatUnraisable("Exception ignored while removing modules"); \ } \ } @@ -1583,7 +1540,7 @@ finalize_remove_modules(PyObject *modules, int verbose) } \ STORE_MODULE_WEAKREF(name, mod); \ if (PyObject_SetItem(modules, name, Py_None) < 0) { \ - PyErr_FormatUnraisable("Exception ignored on removing modules"); \ + PyErr_FormatUnraisable("Exception ignored while removing modules"); \ } \ } @@ -1597,14 +1554,14 @@ finalize_remove_modules(PyObject *modules, int verbose) else { PyObject *iterator = PyObject_GetIter(modules); if (iterator == NULL) { - PyErr_FormatUnraisable("Exception ignored on removing modules"); + PyErr_FormatUnraisable("Exception ignored while removing modules"); } else { PyObject *key; while ((key = PyIter_Next(iterator))) { PyObject *value = PyObject_GetItem(modules, key); if (value == NULL) { - PyErr_FormatUnraisable("Exception ignored on removing modules"); + PyErr_FormatUnraisable("Exception ignored while removing modules"); continue; } CLEAR_MODULE(key, value); @@ -1612,7 +1569,7 @@ finalize_remove_modules(PyObject *modules, int verbose) Py_DECREF(key); } if (PyErr_Occurred()) { - PyErr_FormatUnraisable("Exception ignored on removing modules"); + PyErr_FormatUnraisable("Exception ignored while removing modules"); } Py_DECREF(iterator); } @@ -1632,7 +1589,7 @@ finalize_clear_modules_dict(PyObject *modules) } else { if (PyObject_CallMethodNoArgs(modules, &_Py_ID(clear)) == NULL) { - PyErr_FormatUnraisable("Exception ignored on clearing sys.modules"); + PyErr_FormatUnraisable("Exception ignored while clearing sys.modules"); } } } @@ -1644,11 +1601,11 @@ finalize_restore_builtins(PyThreadState *tstate) PyInterpreterState *interp = tstate->interp; PyObject *dict = PyDict_Copy(interp->builtins); if (dict == NULL) { - PyErr_FormatUnraisable("Exception ignored on restoring builtins"); + PyErr_FormatUnraisable("Exception ignored while restoring builtins"); } PyDict_Clear(interp->builtins); if (PyDict_Update(interp->builtins, interp->builtins_copy)) { - 
PyErr_FormatUnraisable("Exception ignored on restoring builtins"); + PyErr_FormatUnraisable("Exception ignored while restoring builtins"); } Py_XDECREF(dict); } @@ -1705,11 +1662,10 @@ finalize_modules(PyThreadState *tstate) { PyInterpreterState *interp = tstate->interp; + // Invalidate all executors and turn off JIT: + interp->jit = false; #ifdef _Py_TIER2 - // Invalidate all executors and turn off tier 2 optimizer _Py_Executors_InvalidateAll(interp, 0); - _PyOptimizerObject *old = _Py_SetOptimizer(interp, NULL); - Py_XDECREF(old); #endif // Stop watching __builtin__ modifications @@ -1821,7 +1777,7 @@ flush_std_files(void) if (fout != NULL && fout != Py_None && !file_is_closed(fout)) { if (_PyFile_Flush(fout) < 0) { - PyErr_FormatUnraisable("Exception ignored on flushing sys.stdout"); + PyErr_FormatUnraisable("Exception ignored while flushing sys.stdout"); status = -1; } } @@ -2206,6 +2162,7 @@ _Py_Finalize(_PyRuntimeState *runtime) finalize_interp_clear(tstate); + #ifdef Py_TRACE_REFS /* Display addresses (& refcnts) of all objects still alive. * An address can be used to find the repr of the object, printed @@ -2656,7 +2613,7 @@ create_stdio(const PyConfig *config, PyObject* io, #ifdef HAVE_WINDOWS_CONSOLE_IO /* Windows console IO is always UTF-8 encoded */ - PyTypeObject *winconsoleio_type = (PyTypeObject *)_PyImport_GetModuleAttr( + PyTypeObject *winconsoleio_type = (PyTypeObject *)PyImport_ImportModuleAttr( &_Py_ID(_io), &_Py_ID(_WindowsConsoleIO)); if (winconsoleio_type == NULL) { goto error; @@ -2761,7 +2718,7 @@ init_set_builtins_open(void) goto error; } - if (!(wrapper = _PyImport_GetModuleAttrString("io", "open"))) { + if (!(wrapper = PyImport_ImportModuleAttrString("io", "open"))) { goto error; } @@ -2946,7 +2903,7 @@ init_android_streams(PyThreadState *tstate) #endif // __ANDROID__ -#if defined(__APPLE__) +#if defined(__APPLE__) && HAS_APPLE_SYSTEM_LOG static PyObject * apple_log_write_impl(PyObject *self, PyObject *args) @@ -2957,14 +2914,9 @@ apple_log_write_impl(PyObject *self, PyObject *args) return NULL; } - // Call the underlying Apple logging API. The os_log unified logging APIs - // were introduced in macOS 10.12, iOS 10.0, tvOS 10.0, and watchOS 3.0; - // this call is a no-op on older versions. - #if TARGET_OS_IPHONE || (TARGET_OS_OSX && MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_12) // Pass the user-provided text through explicit %s formatting // to avoid % literals being interpreted as a formatting directive. 
os_log_with_type(OS_LOG_DEFAULT, logtype, "%s", text); - #endif Py_RETURN_NONE; } @@ -2999,7 +2951,6 @@ init_apple_streams(PyThreadState *tstate) if (result == NULL) { goto error; } - goto done; error: @@ -3013,7 +2964,7 @@ init_apple_streams(PyThreadState *tstate) return status; } -#endif // __APPLE__ +#endif // __APPLE__ && HAS_APPLE_SYSTEM_LOG static void @@ -3023,7 +2974,11 @@ _Py_FatalError_DumpTracebacks(int fd, PyInterpreterState *interp, PUTS(fd, "\n"); /* display the current Python stack */ +#ifndef Py_GIL_DISABLED _Py_DumpTracebackThreads(fd, interp, tstate); +#else + _Py_DumpTraceback(fd, tstate); +#endif } /* Print the current exception (if an exception is set) with its traceback, diff --git a/Python/pystate.c b/Python/pystate.c index c546b7c3a9f10e..e6770ef40df740 100644 --- a/Python/pystate.c +++ b/Python/pystate.c @@ -655,11 +655,9 @@ init_interpreter(PyInterpreterState *interp, } interp->sys_profile_initialized = false; interp->sys_trace_initialized = false; -#ifdef _Py_TIER2 - (void)_Py_SetOptimizer(interp, NULL); + interp->jit = false; interp->executor_list_head = NULL; interp->trace_run_counter = JIT_CLEANUP_THRESHOLD; -#endif if (interp != &runtime->_main_interpreter) { /* Fix the self-referential, statically initialized fields. */ interp->dtoa = (struct _dtoa_state)_dtoa_state_INIT(interp); @@ -829,12 +827,6 @@ interpreter_clear(PyInterpreterState *interp, PyThreadState *tstate) tstate->_status.cleared = 0; } -#ifdef _Py_TIER2 - _PyOptimizerObject *old = _Py_SetOptimizer(interp, NULL); - assert(old != NULL); - Py_DECREF(old); -#endif - /* It is possible that any of the objects below have a finalizer that runs Python code or otherwise relies on a thread state or even the interpreter state. For now we trust that isn't @@ -1515,6 +1507,7 @@ init_threadstate(_PyThreadStateImpl *_tstate, tstate->dict_global_version = 0; _tstate->asyncio_running_loop = NULL; + _tstate->asyncio_running_task = NULL; tstate->delete_later = NULL; @@ -1697,6 +1690,7 @@ PyThreadState_Clear(PyThreadState *tstate) Py_CLEAR(tstate->threading_local_sentinel); Py_CLEAR(((_PyThreadStateImpl *)tstate)->asyncio_running_loop); + Py_CLEAR(((_PyThreadStateImpl *)tstate)->asyncio_running_task); Py_CLEAR(tstate->dict); Py_CLEAR(tstate->async_exc); @@ -2880,24 +2874,9 @@ _PyInterpreterState_GetConfig(PyInterpreterState *interp) } -int -_PyInterpreterState_GetConfigCopy(PyConfig *config) -{ - PyInterpreterState *interp = _PyInterpreterState_GET(); - - PyStatus status = _PyConfig_Copy(config, &interp->config); - if (PyStatus_Exception(status)) { - _PyErr_SetFromPyStatus(status); - return -1; - } - return 0; -} - - const PyConfig* _Py_GetConfig(void) { - assert(PyGILState_Check()); PyThreadState *tstate = current_fast_get(); _Py_EnsureTstateNotNULL(tstate); return _PyInterpreterState_GetConfig(tstate->interp); diff --git a/Python/pythonrun.c b/Python/pythonrun.c index 31e065ff00d59a..0da26ad3f9b4bd 100644 --- a/Python/pythonrun.c +++ b/Python/pythonrun.c @@ -467,7 +467,7 @@ _PyRun_SimpleFileObject(FILE *fp, PyObject *filename, int closeit, fclose(fp); } - pyc_fp = _Py_fopen_obj(filename, "rb"); + pyc_fp = Py_fopen(filename, "rb"); if (pyc_fp == NULL) { fprintf(stderr, "python: Can't reopen .pyc file\n"); goto done; diff --git a/Python/pytime.c b/Python/pytime.c index 2b37cd991ef4e4..c039fc98ce4bde 100644 --- a/Python/pytime.c +++ b/Python/pytime.c @@ -1,5 +1,6 @@ #include "Python.h" #include "pycore_time.h" // PyTime_t +#include "pycore_pystate.h" // _Py_AssertHoldsTstate() #include // gmtime_r() #ifdef 
HAVE_SYS_TIME_H @@ -897,14 +898,14 @@ _PyTime_AsTimespec(PyTime_t t, struct timespec *ts) #endif -// N.B. If raise_exc=0, this may be called without the GIL. +// N.B. If raise_exc=0, this may be called without a thread state. static int py_get_system_clock(PyTime_t *tp, _Py_clock_info_t *info, int raise_exc) { assert(info == NULL || raise_exc); if (raise_exc) { - // raise_exc requires to hold the GIL - assert(PyGILState_Check()); + // raise_exc requires to hold a thread state + _Py_AssertHoldsTstate(); } #ifdef MS_WINDOWS @@ -1142,14 +1143,14 @@ py_mach_timebase_info(_PyTimeFraction *base, int raise_exc) #endif -// N.B. If raise_exc=0, this may be called without the GIL. +// N.B. If raise_exc=0, this may be called without a thread state. static int py_get_monotonic_clock(PyTime_t *tp, _Py_clock_info_t *info, int raise_exc) { assert(info == NULL || raise_exc); if (raise_exc) { - // raise_exc requires to hold the GIL - assert(PyGILState_Check()); + // raise_exc requires to hold a thread state + _Py_AssertHoldsTstate(); } #if defined(MS_WINDOWS) diff --git a/Python/specialize.c b/Python/specialize.c index c918c77779d20d..8831cfaa82be9b 100644 --- a/Python/specialize.c +++ b/Python/specialize.c @@ -3,6 +3,7 @@ #include "opcode.h" #include "pycore_code.h" +#include "pycore_critical_section.h" #include "pycore_descrobject.h" // _PyMethodWrapper_Type #include "pycore_dict.h" // DICT_KEYS_UNICODE #include "pycore_function.h" // _PyFunction_GetVersionForCurrentState() @@ -308,6 +309,14 @@ print_optimization_stats(FILE *out, OptimizationStats *stats) ); } } + fprintf(out, "JIT total memory size: %" PRIu64 "\n", stats->jit_total_memory_size); + fprintf(out, "JIT code size: %" PRIu64 "\n", stats->jit_code_size); + fprintf(out, "JIT trampoline size: %" PRIu64 "\n", stats->jit_trampoline_size); + fprintf(out, "JIT data size: %" PRIu64 "\n", stats->jit_data_size); + fprintf(out, "JIT padding size: %" PRIu64 "\n", stats->jit_padding_size); + fprintf(out, "JIT freed memory size: %" PRIu64 "\n", stats->jit_freed_memory_size); + + print_histogram(out, "Trace total memory size", stats->trace_total_memory_hist); } #endif @@ -440,8 +449,7 @@ do { \ // Initialize warmup counters and optimize instructions. This cannot fail. void -_PyCode_Quicken(_Py_CODEUNIT *instructions, Py_ssize_t size, PyObject *consts, - int enable_counters) +_PyCode_Quicken(_Py_CODEUNIT *instructions, Py_ssize_t size, int enable_counters) { #if ENABLE_SPECIALIZATION_FT _Py_BackoffCounter jump_counter, adaptive_counter; @@ -478,15 +486,6 @@ _PyCode_Quicken(_Py_CODEUNIT *instructions, Py_ssize_t size, PyObject *consts, } i += caches; } - else if (opcode == LOAD_CONST) { - /* We can't do this in the bytecode compiler as - * marshalling can intern strings and make them immortal. 
*/ - - PyObject *obj = PyTuple_GET_ITEM(consts, oparg); - if (_Py_IsImmortal(obj)) { - instructions[i].op.code = LOAD_CONST_IMMORTAL; - } - } if (opcode != EXTENDED_ARG) { oparg = 0; } @@ -546,6 +545,7 @@ _PyCode_Quicken(_Py_CODEUNIT *instructions, Py_ssize_t size, PyObject *consts, #define SPEC_FAIL_ATTR_BUILTIN_CLASS_METHOD_OBJ 33 #define SPEC_FAIL_ATTR_METACLASS_OVERRIDDEN 34 #define SPEC_FAIL_ATTR_SPLIT_DICT 35 +#define SPEC_FAIL_ATTR_DESCR_NOT_DEFERRED 36 /* Binary subscr and store subscr */ @@ -588,6 +588,10 @@ _PyCode_Quicken(_Py_CODEUNIT *instructions, Py_ssize_t size, PyObject *consts, #define SPEC_FAIL_BINARY_OP_TRUE_DIVIDE_FLOAT 26 #define SPEC_FAIL_BINARY_OP_TRUE_DIVIDE_OTHER 27 #define SPEC_FAIL_BINARY_OP_XOR 28 +#define SPEC_FAIL_BINARY_OP_OR_INT 29 +#define SPEC_FAIL_BINARY_OP_OR_DIFFERENT_TYPES 30 +#define SPEC_FAIL_BINARY_OP_XOR_INT 31 +#define SPEC_FAIL_BINARY_OP_XOR_DIFFERENT_TYPES 32 /* Calls */ @@ -738,11 +742,8 @@ unspecialize(_Py_CODEUNIT *instr) } static int function_kind(PyCodeObject *code); -#ifndef Py_GIL_DISABLED static bool function_check_args(PyObject *o, int expected_argcount, int opcode); static uint32_t function_get_version(PyObject *o, int opcode); -static uint32_t type_get_version(PyTypeObject *t, int opcode); -#endif static int specialize_module_load_attr_lock_held(PyDictObject *dict, _Py_CODEUNIT *instr, PyObject *name) @@ -888,10 +889,11 @@ descriptor_is_class(PyObject *descriptor, PyObject *name) (descriptor == _PyType_Lookup(&PyBaseObject_Type, name))); } -#ifndef Py_GIL_DISABLED static DescriptorClassification -analyze_descriptor_load(PyTypeObject *type, PyObject *name, PyObject **descr) { +analyze_descriptor_load(PyTypeObject *type, PyObject *name, PyObject **descr, unsigned int *tp_version) { bool has_getattr = false; + bool have_ga_version = false; + unsigned int ga_version; getattrofunc getattro_slot = type->tp_getattro; if (getattro_slot == PyObject_GenericGetAttr) { /* Normal attribute lookup; */ @@ -901,24 +903,27 @@ analyze_descriptor_load(PyTypeObject *type, PyObject *name, PyObject **descr) { getattro_slot == _Py_slot_tp_getattro) { /* One or both of __getattribute__ or __getattr__ may have been overridden See typeobject.c for why these functions are special. */ - PyObject *getattribute = _PyType_LookupRef(type, &_Py_ID(__getattribute__)); + PyObject *getattribute = _PyType_LookupRefAndVersion(type, + &_Py_ID(__getattribute__), &ga_version); + have_ga_version = true; PyInterpreterState *interp = _PyInterpreterState_GET(); bool has_custom_getattribute = getattribute != NULL && getattribute != interp->callable_cache.object__getattribute__; - PyObject *getattr = _PyType_LookupRef(type, &_Py_ID(__getattr__)); + PyObject *getattr = _PyType_Lookup(type, &_Py_ID(__getattr__)); has_getattr = getattr != NULL; - Py_XDECREF(getattr); if (has_custom_getattribute) { if (getattro_slot == _Py_slot_tp_getattro && !has_getattr && Py_IS_TYPE(getattribute, &PyFunction_Type)) { *descr = getattribute; + *tp_version = ga_version; return GETATTRIBUTE_IS_PYTHON_FUNCTION; } /* Potentially both __getattr__ and __getattribute__ are set. Too complicated */ Py_DECREF(getattribute); *descr = NULL; + *tp_version = ga_version; return GETSET_OVERRIDDEN; } /* Potentially has __getattr__ but no custom __getattribute__. 
@@ -932,16 +937,18 @@ analyze_descriptor_load(PyTypeObject *type, PyObject *name, PyObject **descr) { } else { *descr = NULL; + *tp_version = FT_ATOMIC_LOAD_UINT_RELAXED(type->tp_version_tag); return GETSET_OVERRIDDEN; } - PyObject *descriptor = _PyType_LookupRef(type, name); + unsigned int descr_version; + PyObject *descriptor = _PyType_LookupRefAndVersion(type, name, &descr_version); *descr = descriptor; + *tp_version = have_ga_version ? ga_version : descr_version; if (descriptor_is_class(descriptor, name)) { return DUNDER_CLASS; } return classify_descriptor(descriptor, has_getattr); } -#endif //!Py_GIL_DISABLED static DescriptorClassification analyze_descriptor_store(PyTypeObject *type, PyObject *name, PyObject **descr, unsigned int *tp_version) @@ -961,12 +968,13 @@ analyze_descriptor_store(PyTypeObject *type, PyObject *name, PyObject **descr, u static int specialize_dict_access_inline( PyObject *owner, _Py_CODEUNIT *instr, PyTypeObject *type, - DescriptorClassification kind, PyObject *name, unsigned int tp_version, + PyObject *name, unsigned int tp_version, int base_op, int values_op) { _PyAttrCache *cache = (_PyAttrCache *)(instr + 1); PyDictKeysObject *keys = ((PyHeapTypeObject *)type)->ht_cached_keys; assert(PyUnicode_CheckExact(name)); + _Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED(owner); Py_ssize_t index = _PyDictKeys_StringLookupSplit(keys, name); assert (index != DKIX_ERROR); if (index == DKIX_EMPTY) { @@ -974,6 +982,7 @@ specialize_dict_access_inline( return 0; } assert(index >= 0); + assert(_PyObject_InlineValues(owner)->valid); char *value_addr = (char *)&_PyObject_InlineValues(owner)->values[index]; Py_ssize_t offset = value_addr - (char *)owner; if (offset != (uint16_t)offset) { @@ -989,10 +998,13 @@ specialize_dict_access_inline( static int specialize_dict_access_hint( PyDictObject *dict, _Py_CODEUNIT *instr, PyTypeObject *type, - DescriptorClassification kind, PyObject *name, unsigned int tp_version, + PyObject *name, unsigned int tp_version, int base_op, int hint_op) { _PyAttrCache *cache = (_PyAttrCache *)(instr + 1); + + _Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED(dict); + // We found an instance with a __dict__. 
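The _Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED assertions and the Py_BEGIN_CRITICAL_SECTION block added around instance_has_key in these hunks exist so that, on free-threaded builds, the whole lookup is computed under one per-object lock rather than returning from the middle of an unlocked read. A standalone model of that discipline, using a pthread mutex as a stand-in for the real critical-section machinery:

    #include <pthread.h>
    #include <stdbool.h>
    #include <stdio.h>

    typedef struct {
        pthread_mutex_t lock;   /* stand-in for the per-object mutex behind
                                   Py_BEGIN_CRITICAL_SECTION in free-threaded builds */
        bool has_values;        /* models dict->ma_values */
        int n_keys;             /* models the key table */
    } toy_dict;

    /* Compute the whole answer under the lock, covering both branches,
     * instead of reading a possibly half-updated dict without it. */
    static bool toy_has_key(toy_dict *d, int key)
    {
        bool result;
        pthread_mutex_lock(&d->lock);
        if (d->has_values) {
            result = false;
        }
        else {
            result = key < d->n_keys;
        }
        pthread_mutex_unlock(&d->lock);
        return result;
    }

    int main(void)
    {
        toy_dict d = { .lock = PTHREAD_MUTEX_INITIALIZER, .has_values = false, .n_keys = 3 };
        printf("%d\n", toy_has_key(&d, 1));   /* prints 1 */
        return 0;
    }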
if (_PyDict_HasSplitTable(dict)) { SPECIALIZATION_FAIL(base_op, SPEC_FAIL_ATTR_SPLIT_DICT); @@ -1036,7 +1048,7 @@ specialize_dict_access( PyDictObject *dict = _PyObject_GetManagedDict(owner); if (dict == NULL) { // managed dict, not materialized, inline values valid - res = specialize_dict_access_inline(owner, instr, type, kind, name, + res = specialize_dict_access_inline(owner, instr, type, name, tp_version, base_op, values_op); } else { @@ -1056,16 +1068,19 @@ specialize_dict_access( int res; Py_BEGIN_CRITICAL_SECTION(dict); // materialized managed dict - res = specialize_dict_access_hint(dict, instr, type, kind, name, + res = specialize_dict_access_hint(dict, instr, type, name, tp_version, base_op, hint_op); Py_END_CRITICAL_SECTION(); return res; } } -#ifndef Py_GIL_DISABLED -static int specialize_attr_loadclassattr(PyObject* owner, _Py_CODEUNIT* instr, PyObject* name, - PyObject* descr, DescriptorClassification kind, bool is_method); +static int +specialize_attr_loadclassattr(PyObject *owner, _Py_CODEUNIT *instr, + PyObject *name, PyObject *descr, + unsigned int tp_version, + DescriptorClassification kind, bool is_method, + uint32_t shared_keys_version); static int specialize_class_load_attr(PyObject* owner, _Py_CODEUNIT* instr, PyObject* name); /* Returns true if instances of obj's class are @@ -1074,7 +1089,7 @@ static int specialize_class_load_attr(PyObject* owner, _Py_CODEUNIT* instr, PyOb * For other objects, we check their actual dictionary. */ static bool -instance_has_key(PyObject *obj, PyObject* name) +instance_has_key(PyObject *obj, PyObject *name, uint32_t *shared_keys_version) { PyTypeObject *cls = Py_TYPE(obj); if ((cls->tp_flags & Py_TPFLAGS_MANAGED_DICT) == 0) { @@ -1082,36 +1097,38 @@ instance_has_key(PyObject *obj, PyObject* name) } if (cls->tp_flags & Py_TPFLAGS_INLINE_VALUES) { PyDictKeysObject *keys = ((PyHeapTypeObject *)cls)->ht_cached_keys; - Py_ssize_t index = _PyDictKeys_StringLookup(keys, name); + Py_ssize_t index = + _PyDictKeys_StringLookupAndVersion(keys, name, shared_keys_version); return index >= 0; } PyDictObject *dict = _PyObject_GetManagedDict(obj); if (dict == NULL || !PyDict_CheckExact(dict)) { return false; } + bool result; + Py_BEGIN_CRITICAL_SECTION(dict); if (dict->ma_values) { - return false; + result = false; } - Py_ssize_t index = _PyDict_LookupIndex(dict, name); - if (index < 0) { - return false; + else { + result = (_PyDict_LookupIndex(dict, name) >= 0); } - return true; + Py_END_CRITICAL_SECTION(); + return result; } static int -specialize_instance_load_attr(PyObject* owner, _Py_CODEUNIT* instr, PyObject* name) +do_specialize_instance_load_attr(PyObject* owner, _Py_CODEUNIT* instr, PyObject* name, + bool shadow, uint32_t shared_keys_version, + DescriptorClassification kind, PyObject *descr, unsigned int tp_version) { _PyAttrCache *cache = (_PyAttrCache *)(instr + 1); PyTypeObject *type = Py_TYPE(owner); - bool shadow = instance_has_key(owner, name); - PyObject *descr = NULL; - DescriptorClassification kind = analyze_descriptor_load(type, name, &descr); - Py_XDECREF(descr); // turn strong ref into a borrowed ref - assert(descr != NULL || kind == ABSENT || kind == GETSET_OVERRIDDEN); - if (type_get_version(type, LOAD_ATTR) == 0) { + if (tp_version == 0) { + SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_OUT_OF_VERSIONS); return -1; } + uint8_t oparg = FT_ATOMIC_LOAD_UINT8_RELAXED(instr->op.arg); switch(kind) { case OVERRIDING: SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_ATTR_OVERRIDING_DESCRIPTOR); @@ -1121,9 +1138,10 @@ 
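The reason analyze_descriptor_load and instance_has_key now report a version alongside their result (via _PyType_LookupRefAndVersion and _PyDictKeys_StringLookupAndVersion) is that the version observed at specialization time becomes the guard checked whenever the cached attribute is later used. A plain-C model of that capture-and-guard pattern, not the real CPython APIs:

    #include <stdint.h>
    #include <stdio.h>

    typedef struct {
        uint32_t version;         /* bumped whenever the "type" is mutated */
        const char *attr;         /* what an attribute lookup would find now */
    } toy_type;

    typedef struct {
        uint32_t cached_version;  /* version captured when we specialized */
        const char *cached_attr;  /* result captured at the same moment */
    } toy_cache;

    /* Look up the attribute and report the version seen at the same moment. */
    static const char *lookup_and_version(toy_type *t, uint32_t *version_out)
    {
        *version_out = t->version;
        return t->attr;
    }

    static const char *cached_load(toy_type *t, toy_cache *c)
    {
        if (t->version != c->cached_version) {
            return NULL;          /* guard failed: fall back to the slow path */
        }
        return c->cached_attr;    /* guard passed: the cached result is still valid */
    }

    int main(void)
    {
        toy_type t = { .version = 7, .attr = "method_v1" };
        toy_cache c;
        c.cached_attr = lookup_and_version(&t, &c.cached_version);

        printf("%s\n", cached_load(&t, &c));                       /* method_v1 */
        t.version++;                                               /* type mutated elsewhere */
        t.attr = "method_v2";
        printf("%s\n", cached_load(&t, &c) ? "hit" : "deopt");     /* deopt */
        return 0;
    }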
specialize_instance_load_attr(PyObject* owner, _Py_CODEUNIT* instr, PyObject* na if (shadow) { goto try_instance; } - int oparg = instr->op.arg; if (oparg & 1) { - if (specialize_attr_loadclassattr(owner, instr, name, descr, kind, true)) { + if (specialize_attr_loadclassattr(owner, instr, name, descr, + tp_version, kind, true, + shared_keys_version)) { return 0; } else { @@ -1149,7 +1167,7 @@ specialize_instance_load_attr(PyObject* owner, _Py_CODEUNIT* instr, PyObject* na if (!function_check_args(fget, 1, LOAD_ATTR)) { return -1; } - if (instr->op.arg & 1) { + if (oparg & 1) { SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_ATTR_METHOD); return -1; } @@ -1158,10 +1176,16 @@ specialize_instance_load_attr(PyObject* owner, _Py_CODEUNIT* instr, PyObject* na SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_OTHER); return -1; } - assert(type->tp_version_tag != 0); - write_u32(lm_cache->type_version, type->tp_version_tag); + #ifdef Py_GIL_DISABLED + if (!_PyObject_HasDeferredRefcount(fget)) { + SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_ATTR_DESCR_NOT_DEFERRED); + return -1; + } + #endif + assert(tp_version != 0); + write_u32(lm_cache->type_version, tp_version); /* borrowed */ - write_obj(lm_cache->descr, fget); + write_ptr(lm_cache->descr, fget); specialize(instr, LOAD_ATTR_PROPERTY); return 0; } @@ -1185,7 +1209,7 @@ specialize_instance_load_attr(PyObject* owner, _Py_CODEUNIT* instr, PyObject* na assert(dmem->type == Py_T_OBJECT_EX || dmem->type == _Py_T_OBJECT); assert(offset > 0); cache->index = (uint16_t)offset; - write_u32(cache->version, type->tp_version_tag); + write_u32(cache->version, tp_version); specialize(instr, LOAD_ATTR_SLOT); return 0; } @@ -1194,7 +1218,7 @@ specialize_instance_load_attr(PyObject* owner, _Py_CODEUNIT* instr, PyObject* na Py_ssize_t offset = offsetof(PyObject, ob_type); assert(offset == (uint16_t)offset); cache->index = (uint16_t)offset; - write_u32(cache->version, type->tp_version_tag); + write_u32(cache->version, tp_version); specialize(instr, LOAD_ATTR_SLOT); return 0; } @@ -1209,13 +1233,18 @@ specialize_instance_load_attr(PyObject* owner, _Py_CODEUNIT* instr, PyObject* na return -1; case GETATTRIBUTE_IS_PYTHON_FUNCTION: { + #ifndef Py_GIL_DISABLED + // In free-threaded builds it's possible for tp_getattro to change + // after the call to analyze_descriptor. That is fine: the version + // guard will fail. 
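The new _PyObject_HasDeferredRefcount checks in these hunks enforce one rule: a specialization cache written with write_ptr holds a borrowed pointer with no incref, so on free-threaded builds it may only cache objects whose lifetime cannot end while another thread is still reading the cache. A plain-C sketch of that rule; the predicate and names below are illustrative, not the interpreter's:

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    typedef struct {
        bool deferred_rc;     /* models _PyObject_HasDeferredRefcount(descr) */
        const char *name;
    } toy_descr;

    typedef struct {
        uintptr_t descr;      /* borrowed pointer, as written with write_ptr above */
        uint32_t type_version;
    } toy_cache;

    /* Only cache a borrowed pointer when the object cannot be freed while a
     * thread still reads the cache; otherwise refuse to specialize. */
    static bool try_specialize(toy_descr *d, uint32_t version, toy_cache *out)
    {
        if (!d->deferred_rc) {
            return false;     /* analogue of SPEC_FAIL_ATTR_DESCR_NOT_DEFERRED */
        }
        out->descr = (uintptr_t)d;
        out->type_version = version;
        return true;
    }

    int main(void)
    {
        toy_descr ok = { true, "method" }, bad = { false, "transient" };
        toy_cache cache;
        printf("%d %d\n",
               try_specialize(&ok, 7, &cache),
               try_specialize(&bad, 7, &cache));   /* prints 1 0 */
        return 0;
    }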
assert(type->tp_getattro == _Py_slot_tp_getattro); + #endif assert(Py_IS_TYPE(descr, &PyFunction_Type)); _PyLoadMethodCache *lm_cache = (_PyLoadMethodCache *)(instr + 1); if (!function_check_args(descr, 2, LOAD_ATTR)) { return -1; } - if (instr->op.arg & 1) { + if (oparg & 1) { SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_ATTR_METHOD); return -1; } @@ -1228,10 +1257,16 @@ specialize_instance_load_attr(PyObject* owner, _Py_CODEUNIT* instr, PyObject* na SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_OTHER); return -1; } + #ifdef Py_GIL_DISABLED + if (!_PyObject_HasDeferredRefcount(descr)) { + SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_ATTR_DESCR_NOT_DEFERRED); + return -1; + } + #endif write_u32(lm_cache->keys_version, version); /* borrowed */ - write_obj(lm_cache->descr, descr); - write_u32(lm_cache->type_version, type->tp_version_tag); + write_ptr(lm_cache->descr, descr); + write_u32(lm_cache->type_version, tp_version); specialize(instr, LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN); return 0; } @@ -1246,8 +1281,10 @@ specialize_instance_load_attr(PyObject* owner, _Py_CODEUNIT* instr, PyObject* na if (shadow) { goto try_instance; } - if ((instr->op.arg & 1) == 0) { - if (specialize_attr_loadclassattr(owner, instr, name, descr, kind, false)) { + if ((oparg & 1) == 0) { + if (specialize_attr_loadclassattr(owner, instr, name, descr, + tp_version, kind, false, + shared_keys_version)) { return 0; } } @@ -1261,14 +1298,28 @@ specialize_instance_load_attr(PyObject* owner, _Py_CODEUNIT* instr, PyObject* na } Py_UNREACHABLE(); try_instance: - if (specialize_dict_access(owner, instr, type, kind, name, type->tp_version_tag, + if (specialize_dict_access(owner, instr, type, kind, name, tp_version, LOAD_ATTR, LOAD_ATTR_INSTANCE_VALUE, LOAD_ATTR_WITH_HINT)) { return 0; } return -1; } -#endif // Py_GIL_DISABLED + +static int +specialize_instance_load_attr(PyObject* owner, _Py_CODEUNIT* instr, PyObject* name) +{ + // 0 is not a valid version + uint32_t shared_keys_version = 0; + bool shadow = instance_has_key(owner, name, &shared_keys_version); + PyObject *descr = NULL; + unsigned int tp_version = 0; + PyTypeObject *type = Py_TYPE(owner); + DescriptorClassification kind = analyze_descriptor_load(type, name, &descr, &tp_version); + int result = do_specialize_instance_load_attr(owner, instr, name, shadow, shared_keys_version, kind, descr, tp_version); + Py_XDECREF(descr); + return result; +} void _Py_Specialize_LoadAttr(_PyStackRef owner_st, _Py_CODEUNIT *instr, PyObject *name) @@ -1290,20 +1341,10 @@ _Py_Specialize_LoadAttr(_PyStackRef owner_st, _Py_CODEUNIT *instr, PyObject *nam fail = specialize_module_load_attr(owner, instr, name); } else if (PyType_Check(owner)) { - #ifdef Py_GIL_DISABLED - SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_EXPECTED_ERROR); - fail = true; - #else fail = specialize_class_load_attr(owner, instr, name); - #endif } else { - #ifdef Py_GIL_DISABLED - SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_EXPECTED_ERROR); - fail = true; - #else fail = specialize_instance_load_attr(owner, instr, name); - #endif } if (fail) { @@ -1411,8 +1452,6 @@ _Py_Specialize_StoreAttr(_PyStackRef owner_st, _Py_CODEUNIT *instr, PyObject *na return; } -#ifndef Py_GIL_DISABLED - #ifdef Py_STATS static int load_attr_fail_kind(DescriptorClassification kind) @@ -1461,8 +1500,10 @@ specialize_class_load_attr(PyObject *owner, _Py_CODEUNIT *instr, SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_ATTR_METACLASS_OVERRIDDEN); return -1; } - PyObject *metadescriptor = _PyType_Lookup(Py_TYPE(cls), name); + unsigned int meta_version = 0; + PyObject 
*metadescriptor = _PyType_LookupRefAndVersion(Py_TYPE(cls), name, &meta_version); DescriptorClassification metakind = classify_descriptor(metadescriptor, false); + Py_XDECREF(metadescriptor); switch (metakind) { case METHOD: case NON_DESCRIPTOR: @@ -1477,38 +1518,52 @@ specialize_class_load_attr(PyObject *owner, _Py_CODEUNIT *instr, } PyObject *descr = NULL; DescriptorClassification kind = 0; - kind = analyze_descriptor_load(cls, name, &descr); - Py_XDECREF(descr); // turn strong ref into a borrowed ref - if (type_get_version(cls, LOAD_ATTR) == 0) { + unsigned int tp_version = 0; + kind = analyze_descriptor_load(cls, name, &descr, &tp_version); + if (tp_version == 0) { + SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_OUT_OF_VERSIONS); + Py_XDECREF(descr); return -1; } bool metaclass_check = false; if ((Py_TYPE(cls)->tp_flags & Py_TPFLAGS_IMMUTABLETYPE) == 0) { metaclass_check = true; - if (type_get_version(Py_TYPE(cls), LOAD_ATTR) == 0) { + if (meta_version == 0) { + SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_OUT_OF_VERSIONS); + Py_XDECREF(descr); return -1; } } switch (kind) { case METHOD: case NON_DESCRIPTOR: - write_u32(cache->type_version, cls->tp_version_tag); - write_obj(cache->descr, descr); + #ifdef Py_GIL_DISABLED + if (!_PyObject_HasDeferredRefcount(descr)) { + SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_ATTR_DESCR_NOT_DEFERRED); + Py_XDECREF(descr); + return -1; + } + #endif + write_u32(cache->type_version, tp_version); + write_ptr(cache->descr, descr); if (metaclass_check) { - write_u32(cache->keys_version, Py_TYPE(cls)->tp_version_tag); + write_u32(cache->keys_version, meta_version); specialize(instr, LOAD_ATTR_CLASS_WITH_METACLASS_CHECK); } else { specialize(instr, LOAD_ATTR_CLASS); } + Py_XDECREF(descr); return 0; #ifdef Py_STATS case ABSENT: SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_EXPECTED_ERROR); + Py_XDECREF(descr); return -1; #endif default: SPECIALIZATION_FAIL(LOAD_ATTR, load_attr_fail_kind(kind)); + Py_XDECREF(descr); return -1; } } @@ -1517,29 +1572,41 @@ specialize_class_load_attr(PyObject *owner, _Py_CODEUNIT *instr, // can cause a significant drop in cache hits. A possible test is // python.exe -m test_typing test_re test_dis test_zlib. 
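A minimal, self-contained sketch of the version-guarded cache pattern these hunks rely on: the same lookup that produces the descriptor also reports the version it observed (as _PyType_LookupRefAndVersion does above), so the cached value and its version guard cannot disagree. The names below are hypothetical stand-ins, not CPython's internal API.

#include <stdio.h>

/* A "type" with a version counter that is bumped on every mutation. */
struct fake_type {
    unsigned int version;
    int attr;               /* the value we want to cache */
};

/* Inline cache: the value plus the version it was captured against. */
struct attr_cache {
    unsigned int version;
    int value;
};

/* Lookup that reports the version it observed, in the spirit of a
   combined (descr, tp_version) lookup. */
static int lookup(struct fake_type *t, unsigned int *version_out) {
    *version_out = t->version;
    return t->attr;
}

static void specialize(struct fake_type *t, struct attr_cache *c) {
    unsigned int version;
    int value = lookup(t, &version);
    if (version == 0) {
        return;              /* 0 means "no valid version": do not cache */
    }
    c->version = version;
    c->value = value;
}

static int load_attr(struct fake_type *t, struct attr_cache *c) {
    if (c->version == t->version) {
        return c->value;     /* guard hit: use the cache */
    }
    unsigned int ignored;
    return lookup(t, &ignored);  /* guard miss: full lookup */
}

static void mutate(struct fake_type *t, int new_attr) {
    t->attr = new_attr;
    t->version++;            /* invalidates every cache holding the old version */
}

int main(void) {
    struct fake_type t = { .version = 1, .attr = 42 };
    struct attr_cache c = {0};
    specialize(&t, &c);
    printf("%d\n", load_attr(&t, &c));  /* 42, via the cache */
    mutate(&t, 7);
    printf("%d\n", load_attr(&t, &c));  /* 7, cache bypassed by the version guard */
    return 0;
}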
static int -specialize_attr_loadclassattr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name, -PyObject *descr, DescriptorClassification kind, bool is_method) +specialize_attr_loadclassattr(PyObject *owner, _Py_CODEUNIT *instr, + PyObject *name, PyObject *descr, + unsigned int tp_version, + DescriptorClassification kind, bool is_method, + uint32_t shared_keys_version) { _PyLoadMethodCache *cache = (_PyLoadMethodCache *)(instr + 1); PyTypeObject *owner_cls = Py_TYPE(owner); assert(descr != NULL); assert((is_method && kind == METHOD) || (!is_method && kind == NON_DESCRIPTOR)); - if (owner_cls->tp_flags & Py_TPFLAGS_INLINE_VALUES) { - PyDictKeysObject *keys = ((PyHeapTypeObject *)owner_cls)->ht_cached_keys; - assert(_PyDictKeys_StringLookup(keys, name) < 0); - uint32_t keys_version = _PyDictKeys_GetVersionForCurrentState( - _PyInterpreterState_GET(), keys); - if (keys_version == 0) { + + #ifdef Py_GIL_DISABLED + if (!_PyObject_HasDeferredRefcount(descr)) { + SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_ATTR_DESCR_NOT_DEFERRED); + return 0; + } + #endif + + unsigned long tp_flags = PyType_GetFlags(owner_cls); + if (tp_flags & Py_TPFLAGS_INLINE_VALUES) { + #ifndef Py_GIL_DISABLED + assert(_PyDictKeys_StringLookup( + ((PyHeapTypeObject *)owner_cls)->ht_cached_keys, name) < 0); + #endif + if (shared_keys_version == 0) { SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_OUT_OF_VERSIONS); return 0; } - write_u32(cache->keys_version, keys_version); + write_u32(cache->keys_version, shared_keys_version); specialize(instr, is_method ? LOAD_ATTR_METHOD_WITH_VALUES : LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES); } else { Py_ssize_t dictoffset; - if (owner_cls->tp_flags & Py_TPFLAGS_MANAGED_DICT) { + if (tp_flags & Py_TPFLAGS_MANAGED_DICT) { dictoffset = MANAGED_DICT_OFFSET; } else { @@ -1585,13 +1652,11 @@ PyObject *descr, DescriptorClassification kind, bool is_method) * PyType_Modified usages in typeobject.c). The MCACHE has been * working since Python 2.6 and it's battle-tested. */ - write_u32(cache->type_version, owner_cls->tp_version_tag); - write_obj(cache->descr, descr); + write_u32(cache->type_version, tp_version); + write_ptr(cache->descr, descr); return 1; } -#endif // Py_GIL_DISABLED - static void specialize_load_global_lock_held( @@ -1738,7 +1803,6 @@ function_kind(PyCodeObject *code) { return SIMPLE_FUNCTION; } -#ifndef Py_GIL_DISABLED /* Returning false indicates a failure. */ static bool function_check_args(PyObject *o, int expected_argcount, int opcode) @@ -1772,19 +1836,6 @@ function_get_version(PyObject *o, int opcode) return version; } -/* Returning 0 indicates a failure. 
*/ -static uint32_t -type_get_version(PyTypeObject *t, int opcode) -{ - uint32_t version = t->tp_version_tag; - if (version == 0) { - SPECIALIZATION_FAIL(opcode, SPEC_FAIL_OUT_OF_VERSIONS); - return 0; - } - return version; -} -#endif // Py_GIL_DISABLED - void _Py_Specialize_BinarySubscr( _PyStackRef container_st, _PyStackRef sub_st, _Py_CODEUNIT *instr) @@ -2339,6 +2390,12 @@ binary_op_fail_kind(int oparg, PyObject *lhs, PyObject *rhs) return SPEC_FAIL_BINARY_OP_MULTIPLY_OTHER; case NB_OR: case NB_INPLACE_OR: + if (!Py_IS_TYPE(lhs, Py_TYPE(rhs))) { + return SPEC_FAIL_BINARY_OP_OR_DIFFERENT_TYPES; + } + if (PyLong_CheckExact(lhs)) { + return SPEC_FAIL_BINARY_OP_OR_INT; + } return SPEC_FAIL_BINARY_OP_OR; case NB_POWER: case NB_INPLACE_POWER: @@ -2366,12 +2423,160 @@ binary_op_fail_kind(int oparg, PyObject *lhs, PyObject *rhs) return SPEC_FAIL_BINARY_OP_TRUE_DIVIDE_OTHER; case NB_XOR: case NB_INPLACE_XOR: + if (!Py_IS_TYPE(lhs, Py_TYPE(rhs))) { + return SPEC_FAIL_BINARY_OP_XOR_DIFFERENT_TYPES; + } + if (PyLong_CheckExact(lhs)) { + return SPEC_FAIL_BINARY_OP_XOR_INT; + } return SPEC_FAIL_BINARY_OP_XOR; } Py_UNREACHABLE(); } #endif +/** Binary Op Specialization Extensions */ + +/* long-long */ + +static inline int +is_compactlong(PyObject *v) +{ + return PyLong_CheckExact(v) && + _PyLong_IsCompact((PyLongObject *)v); +} + +static int +compactlongs_guard(PyObject *lhs, PyObject *rhs) +{ + return (is_compactlong(lhs) && is_compactlong(rhs)); +} + +#define BITWISE_LONGS_ACTION(NAME, OP) \ + static PyObject * \ + (NAME)(PyObject *lhs, PyObject *rhs) \ + { \ + Py_ssize_t rhs_val = _PyLong_CompactValue((PyLongObject *)rhs); \ + Py_ssize_t lhs_val = _PyLong_CompactValue((PyLongObject *)lhs); \ + return PyLong_FromSsize_t(lhs_val OP rhs_val); \ + } +BITWISE_LONGS_ACTION(compactlongs_or, |) +BITWISE_LONGS_ACTION(compactlongs_and, &) +BITWISE_LONGS_ACTION(compactlongs_xor, ^) +#undef BITWISE_LONGS_ACTION + +/* float-long */ + +static inline int +float_compactlong_guard(PyObject *lhs, PyObject *rhs) +{ + return ( + PyFloat_CheckExact(lhs) && + !isnan(PyFloat_AsDouble(lhs)) && + PyLong_CheckExact(rhs) && + _PyLong_IsCompact((PyLongObject *)rhs) + ); +} + +static inline int +nonzero_float_compactlong_guard(PyObject *lhs, PyObject *rhs) +{ + return ( + float_compactlong_guard(lhs, rhs) && !PyLong_IsZero(rhs) + ); +} + +#define FLOAT_LONG_ACTION(NAME, OP) \ + static PyObject * \ + (NAME)(PyObject *lhs, PyObject *rhs) \ + { \ + double lhs_val = PyFloat_AsDouble(lhs); \ + Py_ssize_t rhs_val = _PyLong_CompactValue((PyLongObject *)rhs); \ + return PyFloat_FromDouble(lhs_val OP rhs_val); \ + } +FLOAT_LONG_ACTION(float_compactlong_add, +) +FLOAT_LONG_ACTION(float_compactlong_subtract, -) +FLOAT_LONG_ACTION(float_compactlong_multiply, *) +FLOAT_LONG_ACTION(float_compactlong_true_div, /) +#undef FLOAT_LONG_ACTION + +/* long-float */ + +static inline int +compactlong_float_guard(PyObject *lhs, PyObject *rhs) +{ + return ( + PyLong_CheckExact(lhs) && + _PyLong_IsCompact((PyLongObject *)lhs) && + PyFloat_CheckExact(rhs) && + !isnan(PyFloat_AsDouble(rhs)) + ); +} + +static inline int +nonzero_compactlong_float_guard(PyObject *lhs, PyObject *rhs) +{ + return ( + compactlong_float_guard(lhs, rhs) && PyFloat_AsDouble(rhs) != 0.0 + ); +} + +#define LONG_FLOAT_ACTION(NAME, OP) \ + static PyObject * \ + (NAME)(PyObject *lhs, PyObject *rhs) \ + { \ + double rhs_val = PyFloat_AsDouble(rhs); \ + Py_ssize_t lhs_val = _PyLong_CompactValue((PyLongObject *)lhs); \ + return PyFloat_FromDouble(lhs_val OP rhs_val); \ + } 
+LONG_FLOAT_ACTION(compactlong_float_add, +) +LONG_FLOAT_ACTION(compactlong_float_subtract, -) +LONG_FLOAT_ACTION(compactlong_float_multiply, *) +LONG_FLOAT_ACTION(compactlong_float_true_div, /) +#undef LONG_FLOAT_ACTION + +static _PyBinaryOpSpecializationDescr compactlongs_specs[NB_OPARG_LAST+1] = { + [NB_OR] = {compactlongs_guard, compactlongs_or}, + [NB_AND] = {compactlongs_guard, compactlongs_and}, + [NB_XOR] = {compactlongs_guard, compactlongs_xor}, + [NB_INPLACE_OR] = {compactlongs_guard, compactlongs_or}, + [NB_INPLACE_AND] = {compactlongs_guard, compactlongs_and}, + [NB_INPLACE_XOR] = {compactlongs_guard, compactlongs_xor}, +}; + +static _PyBinaryOpSpecializationDescr float_compactlong_specs[NB_OPARG_LAST+1] = { + [NB_ADD] = {float_compactlong_guard, float_compactlong_add}, + [NB_SUBTRACT] = {float_compactlong_guard, float_compactlong_subtract}, + [NB_TRUE_DIVIDE] = {nonzero_float_compactlong_guard, float_compactlong_true_div}, + [NB_MULTIPLY] = {float_compactlong_guard, float_compactlong_multiply}, +}; + +static _PyBinaryOpSpecializationDescr compactlong_float_specs[NB_OPARG_LAST+1] = { + [NB_ADD] = {compactlong_float_guard, compactlong_float_add}, + [NB_SUBTRACT] = {compactlong_float_guard, compactlong_float_subtract}, + [NB_TRUE_DIVIDE] = {nonzero_compactlong_float_guard, compactlong_float_true_div}, + [NB_MULTIPLY] = {compactlong_float_guard, compactlong_float_multiply}, +}; + +static int +binary_op_extended_specialization(PyObject *lhs, PyObject *rhs, int oparg, + _PyBinaryOpSpecializationDescr **descr) +{ +#define LOOKUP_SPEC(TABLE, OPARG) \ + if ((TABLE)[(OPARG)].action) { \ + if ((TABLE)[(OPARG)].guard(lhs, rhs)) { \ + *descr = &((TABLE)[OPARG]); \ + return 1; \ + } \ + } + + LOOKUP_SPEC(compactlong_float_specs, oparg); + LOOKUP_SPEC(float_compactlong_specs, oparg); + LOOKUP_SPEC(compactlongs_specs, oparg); +#undef LOOKUP_SPEC + return 0; +} + void _Py_Specialize_BinaryOp(_PyStackRef lhs_st, _PyStackRef rhs_st, _Py_CODEUNIT *instr, int oparg, _PyStackRef *locals) @@ -2380,6 +2585,12 @@ _Py_Specialize_BinaryOp(_PyStackRef lhs_st, _PyStackRef rhs_st, _Py_CODEUNIT *in PyObject *rhs = PyStackRef_AsPyObjectBorrow(rhs_st); assert(ENABLE_SPECIALIZATION_FT); assert(_PyOpcode_Caches[BINARY_OP] == INLINE_CACHE_ENTRIES_BINARY_OP); + + _PyBinaryOpCache *cache = (_PyBinaryOpCache *)(instr + 1); + if (instr->op.code == BINARY_OP_EXTEND) { + write_ptr(cache->external_cache, NULL); + } + switch (oparg) { case NB_ADD: case NB_INPLACE_ADD: @@ -2434,8 +2645,17 @@ _Py_Specialize_BinaryOp(_PyStackRef lhs_st, _PyStackRef rhs_st, _Py_CODEUNIT *in } break; } + + _PyBinaryOpSpecializationDescr *descr; + if (binary_op_extended_specialization(lhs, rhs, oparg, &descr)) { + specialize(instr, BINARY_OP_EXTEND); + write_ptr(cache->external_cache, (void*)descr); + return; + } + SPECIALIZATION_FAIL(BINARY_OP, binary_op_fail_kind(oparg, lhs, rhs)); unspecialize(instr); + return; } @@ -2480,23 +2700,23 @@ _Py_Specialize_CompareOp(_PyStackRef lhs_st, _PyStackRef rhs_st, _Py_CODEUNIT *i { PyObject *lhs = PyStackRef_AsPyObjectBorrow(lhs_st); PyObject *rhs = PyStackRef_AsPyObjectBorrow(rhs_st); + uint8_t specialized_op; - assert(ENABLE_SPECIALIZATION); + assert(ENABLE_SPECIALIZATION_FT); assert(_PyOpcode_Caches[COMPARE_OP] == INLINE_CACHE_ENTRIES_COMPARE_OP); // All of these specializations compute boolean values, so they're all valid // regardless of the fifth-lowest oparg bit. 
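A self-contained sketch of the guard/action descriptor tables that drive BINARY_OP_EXTEND above: each slot pairs a guard (may these operands take the fast path?) with an action (the fast-path computation), and specialization succeeds only when a slot exists for the operator and its guard accepts the operands. Plain C stand-ins below, not the CPython types.

#include <stdio.h>

/* Illustrative stand-in for _PyBinaryOpSpecializationDescr; operands are
   plain longs here instead of Python objects. */
typedef int  (*guard_fn)(long lhs, long rhs);
typedef long (*action_fn)(long lhs, long rhs);
typedef struct { guard_fn guard; action_fn action; } op_descr;

enum { OP_ADD, OP_OR, OP_LAST };

static int small_operands(long lhs, long rhs) {
    return lhs > -1000 && lhs < 1000 && rhs > -1000 && rhs < 1000;
}
static long do_add(long lhs, long rhs) { return lhs + rhs; }
static long do_or(long lhs, long rhs)  { return lhs | rhs; }

/* Table indexed by the operator, mirroring compactlongs_specs[NB_OPARG_LAST+1]. */
static op_descr specs[OP_LAST] = {
    [OP_ADD] = { small_operands, do_add },
    [OP_OR]  = { small_operands, do_or  },
};

/* Analogous to binary_op_extended_specialization(): succeed only if a
   descriptor exists for this operator and its guard accepts the operands. */
static const op_descr *find_spec(long lhs, long rhs, int op) {
    if (specs[op].action != NULL && specs[op].guard(lhs, rhs)) {
        return &specs[op];
    }
    return NULL;
}

int main(void) {
    const op_descr *d = find_spec(6, 3, OP_OR);
    if (d != NULL) {
        printf("fast path: %ld\n", d->action(6, 3));     /* 7 */
    }
    if (find_spec(1000000, 3, OP_OR) == NULL) {
        printf("guard rejected: generic path\n");
    }
    return 0;
}

The same shape scales to several operand-kind tables (as with float_compactlong_specs and compactlong_float_specs above) by probing each table in turn, which is what the LOOKUP_SPEC macro does.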
- _PyCompareOpCache *cache = (_PyCompareOpCache *)(instr + 1); if (Py_TYPE(lhs) != Py_TYPE(rhs)) { SPECIALIZATION_FAIL(COMPARE_OP, compare_op_fail_kind(lhs, rhs)); goto failure; } if (PyFloat_CheckExact(lhs)) { - instr->op.code = COMPARE_OP_FLOAT; + specialized_op = COMPARE_OP_FLOAT; goto success; } if (PyLong_CheckExact(lhs)) { if (_PyLong_IsCompact((PyLongObject *)lhs) && _PyLong_IsCompact((PyLongObject *)rhs)) { - instr->op.code = COMPARE_OP_INT; + specialized_op = COMPARE_OP_INT; goto success; } else { @@ -2511,19 +2731,16 @@ _Py_Specialize_CompareOp(_PyStackRef lhs_st, _PyStackRef rhs_st, _Py_CODEUNIT *i goto failure; } else { - instr->op.code = COMPARE_OP_STR; + specialized_op = COMPARE_OP_STR; goto success; } } SPECIALIZATION_FAIL(COMPARE_OP, compare_op_fail_kind(lhs, rhs)); failure: - STAT_INC(COMPARE_OP, failure); - instr->op.code = COMPARE_OP; - cache->counter = adaptive_counter_backoff(cache->counter); + unspecialize(instr); return; success: - STAT_INC(COMPARE_OP, success); - cache->counter = adaptive_counter_cooldown(); + specialize(instr, specialized_op); } #ifdef Py_STATS diff --git a/Python/sysmodule.c b/Python/sysmodule.c index d6719f9bb0af91..d5cb448eb618e8 100644 --- a/Python/sysmodule.c +++ b/Python/sysmodule.c @@ -972,6 +972,23 @@ sys__is_interned_impl(PyObject *module, PyObject *string) return PyUnicode_CHECK_INTERNED(string); } +/*[clinic input] +sys._is_immortal -> bool + + op: object + / + +Return True if the given object is "immortal" per PEP 683. + +This function should be used for specialized purposes only. +[clinic start generated code]*/ + +static int +sys__is_immortal_impl(PyObject *module, PyObject *op) +/*[clinic end generated code: output=c2f5d6a80efb8d1a input=4609c9bf5481db76]*/ +{ + return PyUnstable_IsImmortal(op); +} /* * Cached interned string objects used for calling the profile and @@ -2265,9 +2282,7 @@ sys_activate_stack_trampoline_impl(PyObject *module, const char *backend) { #ifdef PY_HAVE_PERF_TRAMPOLINE #ifdef _Py_JIT - _PyOptimizerObject* optimizer = _Py_GetOptimizer(); - if (optimizer != NULL) { - Py_DECREF(optimizer); + if (_PyInterpreterState_GET()->jit) { PyErr_SetString(PyExc_ValueError, "Cannot activate the perf trampoline if the JIT is active"); return NULL; } @@ -2356,7 +2371,7 @@ static PyObject * sys__dump_tracelets_impl(PyObject *module, PyObject *outpath) /*[clinic end generated code: output=a7fe265e2bc3b674 input=5bff6880cd28ffd1]*/ { - FILE *out = _Py_fopen_obj(outpath, "wb"); + FILE *out = Py_fopen(outpath, "wb"); if (out == NULL) { return NULL; } @@ -2590,6 +2605,7 @@ static PyMethodDef sys_methods[] = { SYS__GETFRAMEMODULENAME_METHODDEF SYS_GETWINDOWSVERSION_METHODDEF SYS__ENABLELEGACYWINDOWSFSENCODING_METHODDEF + SYS__IS_IMMORTAL_METHODDEF SYS_INTERN_METHODDEF SYS__IS_INTERNED_METHODDEF SYS_IS_FINALIZING_METHODDEF @@ -2849,6 +2865,7 @@ PySys_ResetWarnOptions(void) static int _PySys_AddWarnOptionWithError(PyThreadState *tstate, PyObject *option) { + assert(tstate != NULL); PyObject *warnoptions = get_warnoptions(tstate); if (warnoptions == NULL) { return -1; @@ -2864,11 +2881,11 @@ PyAPI_FUNC(void) PySys_AddWarnOptionUnicode(PyObject *option) { PyThreadState *tstate = _PyThreadState_GET(); + _Py_EnsureTstateNotNULL(tstate); + assert(!_PyErr_Occurred(tstate)); if (_PySys_AddWarnOptionWithError(tstate, option) < 0) { /* No return value, therefore clear error state if possible */ - if (tstate) { - _PyErr_Clear(tstate); - } + _PyErr_Clear(tstate); } } diff --git a/Python/traceback.c b/Python/traceback.c index 
e819909b6045c3..870ae5bcefe9eb 100644 --- a/Python/traceback.c +++ b/Python/traceback.c @@ -38,6 +38,8 @@ class traceback "PyTracebackObject *" "&PyTraceback_Type" [clinic start generated code]*/ /*[clinic end generated code: output=da39a3ee5e6b4b0d input=cf96294b2bebc811]*/ +#define _PyTracebackObject_CAST(op) ((PyTracebackObject *)(op)) + #include "clinic/traceback.c.h" static PyObject * @@ -91,15 +93,16 @@ tb_new_impl(PyTypeObject *type, PyObject *tb_next, PyFrameObject *tb_frame, } static PyObject * -tb_dir(PyTracebackObject *self, PyObject *Py_UNUSED(ignored)) +tb_dir(PyObject *Py_UNUSED(self), PyObject *Py_UNUSED(ignored)) { return Py_BuildValue("[ssss]", "tb_frame", "tb_next", "tb_lasti", "tb_lineno"); } static PyObject * -tb_next_get(PyTracebackObject *self, void *Py_UNUSED(_)) +tb_next_get(PyObject *op, void *Py_UNUSED(_)) { + PyTracebackObject *self = _PyTracebackObject_CAST(op); PyObject* ret = (PyObject*)self->tb_next; if (!ret) { ret = Py_None; @@ -108,18 +111,21 @@ tb_next_get(PyTracebackObject *self, void *Py_UNUSED(_)) } static int -tb_get_lineno(PyTracebackObject* tb) { +tb_get_lineno(PyObject *op) +{ + PyTracebackObject *tb = _PyTracebackObject_CAST(op); _PyInterpreterFrame* frame = tb->tb_frame->f_frame; assert(frame != NULL); return PyCode_Addr2Line(_PyFrame_GetCode(frame), tb->tb_lasti); } static PyObject * -tb_lineno_get(PyTracebackObject *self, void *Py_UNUSED(_)) +tb_lineno_get(PyObject *op, void *Py_UNUSED(_)) { + PyTracebackObject *self = _PyTracebackObject_CAST(op); int lineno = self->tb_lineno; if (lineno == -1) { - lineno = tb_get_lineno(self); + lineno = tb_get_lineno(op); if (lineno < 0) { Py_RETURN_NONE; } @@ -128,7 +134,7 @@ tb_lineno_get(PyTracebackObject *self, void *Py_UNUSED(_)) } static int -tb_next_set(PyTracebackObject *self, PyObject *new_next, void *Py_UNUSED(_)) +tb_next_set(PyObject *op, PyObject *new_next, void *Py_UNUSED(_)) { if (!new_next) { PyErr_Format(PyExc_TypeError, "can't delete tb_next attribute"); @@ -147,6 +153,7 @@ tb_next_set(PyTracebackObject *self, PyObject *new_next, void *Py_UNUSED(_)) } /* Check for loops */ + PyTracebackObject *self = _PyTracebackObject_CAST(op); PyTracebackObject *cursor = (PyTracebackObject *)new_next; while (cursor) { if (cursor == self) { @@ -163,7 +170,7 @@ tb_next_set(PyTracebackObject *self, PyObject *new_next, void *Py_UNUSED(_)) static PyMethodDef tb_methods[] = { - {"__dir__", _PyCFunction_CAST(tb_dir), METH_NOARGS}, + {"__dir__", tb_dir, METH_NOARGS, NULL}, {NULL, NULL, 0, NULL}, }; @@ -174,14 +181,15 @@ static PyMemberDef tb_memberlist[] = { }; static PyGetSetDef tb_getsetters[] = { - {"tb_next", (getter)tb_next_get, (setter)tb_next_set, NULL, NULL}, - {"tb_lineno", (getter)tb_lineno_get, NULL, NULL, NULL}, + {"tb_next", tb_next_get, tb_next_set, NULL, NULL}, + {"tb_lineno", tb_lineno_get, NULL, NULL, NULL}, {NULL} /* Sentinel */ }; static void -tb_dealloc(PyTracebackObject *tb) +tb_dealloc(PyObject *op) { + PyTracebackObject *tb = _PyTracebackObject_CAST(op); PyObject_GC_UnTrack(tb); Py_TRASHCAN_BEGIN(tb, tb_dealloc) Py_XDECREF(tb->tb_next); @@ -191,16 +199,18 @@ tb_dealloc(PyTracebackObject *tb) } static int -tb_traverse(PyTracebackObject *tb, visitproc visit, void *arg) +tb_traverse(PyObject *op, visitproc visit, void *arg) { + PyTracebackObject *tb = _PyTracebackObject_CAST(op); Py_VISIT(tb->tb_next); Py_VISIT(tb->tb_frame); return 0; } static int -tb_clear(PyTracebackObject *tb) +tb_clear(PyObject *op) { + PyTracebackObject *tb = _PyTracebackObject_CAST(op); Py_CLEAR(tb->tb_next); 
Py_CLEAR(tb->tb_frame); return 0; @@ -211,7 +221,7 @@ PyTypeObject PyTraceBack_Type = { "traceback", sizeof(PyTracebackObject), 0, - (destructor)tb_dealloc, /*tp_dealloc*/ + tb_dealloc, /*tp_dealloc*/ 0, /*tp_vectorcall_offset*/ 0, /*tp_getattr*/ 0, /*tp_setattr*/ @@ -228,8 +238,8 @@ PyTypeObject PyTraceBack_Type = { 0, /* tp_as_buffer */ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,/* tp_flags */ tb_new__doc__, /* tp_doc */ - (traverseproc)tb_traverse, /* tp_traverse */ - (inquiry)tb_clear, /* tp_clear */ + tb_traverse, /* tp_traverse */ + tb_clear, /* tp_clear */ 0, /* tp_richcompare */ 0, /* tp_weaklistoffset */ 0, /* tp_iter */ @@ -663,7 +673,7 @@ tb_printinternal(PyTracebackObject *tb, PyObject *f, long limit) code = PyFrame_GetCode(tb->tb_frame); int tb_lineno = tb->tb_lineno; if (tb_lineno == -1) { - tb_lineno = tb_get_lineno(tb); + tb_lineno = tb_get_lineno((PyObject *)tb); } if (last_file == NULL || code->co_filename != last_file || @@ -890,7 +900,7 @@ _Py_DumpASCII(int fd, PyObject *text) static void dump_frame(int fd, _PyInterpreterFrame *frame) { - assert(frame->owner != FRAME_OWNED_BY_CSTACK); + assert(frame->owner < FRAME_OWNED_BY_INTERPRETER); PyCodeObject *code =_PyFrame_GetCode(frame); PUTS(fd, " File "); @@ -965,7 +975,7 @@ dump_traceback(int fd, PyThreadState *tstate, int write_header) unsigned int depth = 0; while (1) { - if (frame->owner == FRAME_OWNED_BY_CSTACK) { + if (frame->owner == FRAME_OWNED_BY_INTERPRETER) { /* Trampoline frame */ frame = frame->previous; if (frame == NULL) { @@ -973,7 +983,7 @@ dump_traceback(int fd, PyThreadState *tstate, int write_header) } /* Can't have more than one shim frame in a row */ - assert(frame->owner != FRAME_OWNED_BY_CSTACK); + assert(frame->owner != FRAME_OWNED_BY_INTERPRETER); } if (MAX_FRAME_DEPTH <= depth) { diff --git a/Python/tracemalloc.c b/Python/tracemalloc.c index f661d69c0312fa..d69b0ebd585a7f 100644 --- a/Python/tracemalloc.c +++ b/Python/tracemalloc.c @@ -2,6 +2,8 @@ #include "pycore_fileutils.h" // _Py_write_noraise() #include "pycore_gc.h" // PyGC_Head #include "pycore_hashtable.h" // _Py_hashtable_t +#include "pycore_initconfig.h" // _PyStatus_NO_MEMORY() +#include "pycore_lock.h" // PyMutex_LockFlags() #include "pycore_object.h" // _PyType_PreHeaderSize() #include "pycore_pymem.h" // _Py_tracemalloc_config #include "pycore_runtime.h" // _Py_ID() @@ -19,6 +21,8 @@ _Py_DECLARE_STR(anon_unknown, ""); /* Forward declaration */ static void* raw_malloc(size_t size); static void raw_free(void *ptr); +static int _PyTraceMalloc_TraceRef(PyObject *op, PyRefTracerEvent event, + void* Py_UNUSED(ignore)); #ifdef Py_DEBUG # define TRACE_DEBUG @@ -30,18 +34,12 @@ static void raw_free(void *ptr); #define allocators _PyRuntime.tracemalloc.allocators -#if defined(TRACE_RAW_MALLOC) /* This lock is needed because tracemalloc_free() is called without the GIL held from PyMem_RawFree(). It cannot acquire the lock because it would introduce a deadlock in _PyThreadState_DeleteCurrent(). */ -# define tables_lock _PyRuntime.tracemalloc.tables_lock -# define TABLES_LOCK() PyThread_acquire_lock(tables_lock, 1) -# define TABLES_UNLOCK() PyThread_release_lock(tables_lock) -#else - /* variables are protected by the GIL */ -# define TABLES_LOCK() -# define TABLES_UNLOCK() -#endif +#define tables_lock _PyRuntime.tracemalloc.tables_lock +#define TABLES_LOCK() PyMutex_LockFlags(&tables_lock, _Py_LOCK_DONT_DETACH) +#define TABLES_UNLOCK() PyMutex_Unlock(&tables_lock) #define DEFAULT_DOMAIN 0 @@ -95,9 +93,6 @@ tracemalloc_error(const char *format, ...) 
#endif -#if defined(TRACE_RAW_MALLOC) -#define REENTRANT_THREADLOCAL - #define tracemalloc_reentrant_key _PyRuntime.tracemalloc.reentrant_key /* Any non-NULL pointer can be used */ @@ -106,16 +101,16 @@ tracemalloc_error(const char *format, ...) static int get_reentrant(void) { - void *ptr; - assert(PyThread_tss_is_created(&tracemalloc_reentrant_key)); - ptr = PyThread_tss_get(&tracemalloc_reentrant_key); + + void *ptr = PyThread_tss_get(&tracemalloc_reentrant_key); if (ptr != NULL) { assert(ptr == REENTRANT); return 1; } - else + else { return 0; + } } static void @@ -134,25 +129,6 @@ set_reentrant(int reentrant) } } -#else - -/* TRACE_RAW_MALLOC not defined: variable protected by the GIL */ -static int tracemalloc_reentrant = 0; - -static int -get_reentrant(void) -{ - return tracemalloc_reentrant; -} - -static void -set_reentrant(int reentrant) -{ - assert(reentrant != tracemalloc_reentrant); - tracemalloc_reentrant = reentrant; -} -#endif - static Py_uhash_t hashtable_hash_pyobject(const void *key) @@ -252,6 +228,7 @@ tracemalloc_get_frame(_PyInterpreterFrame *pyframe, frame_t *frame) { assert(PyStackRef_CodeCheck(pyframe->f_executable)); frame->filename = &_Py_STR(anon_unknown); + int lineno = PyUnstable_InterpreterFrame_GetLine(pyframe); if (lineno < 0) { lineno = 0; @@ -259,7 +236,6 @@ tracemalloc_get_frame(_PyInterpreterFrame *pyframe, frame_t *frame) frame->lineno = (unsigned int)lineno; PyObject *filename = filename = _PyFrame_GetCode(pyframe)->co_filename; - if (filename == NULL) { #ifdef TRACE_DEBUG tracemalloc_error("failed to get the filename of the code object"); @@ -275,7 +251,7 @@ tracemalloc_get_frame(_PyInterpreterFrame *pyframe, frame_t *frame) } if (!PyUnicode_IS_READY(filename)) { /* Don't make a Unicode string ready to avoid reentrant calls - to tracemalloc_malloc() or tracemalloc_realloc() */ + to tracemalloc_alloc() or tracemalloc_realloc() */ #ifdef TRACE_DEBUG tracemalloc_error("filename is not a ready unicode string"); #endif @@ -309,7 +285,7 @@ tracemalloc_get_frame(_PyInterpreterFrame *pyframe, frame_t *frame) static Py_uhash_t traceback_hash(traceback_t *traceback) { - /* code based on tuplehash() of Objects/tupleobject.c */ + /* code based on tuple_hash() of Objects/tupleobject.c */ Py_uhash_t x, y; /* Unsigned for defined overflow behavior. 
*/ int len = traceback->nframe; Py_uhash_t mult = PyHASH_MULTIPLIER; @@ -335,13 +311,8 @@ traceback_hash(traceback_t *traceback) static void traceback_get_frames(traceback_t *traceback) { - PyThreadState *tstate = PyGILState_GetThisThreadState(); - if (tstate == NULL) { -#ifdef TRACE_DEBUG - tracemalloc_error("failed to get the current thread state"); -#endif - return; - } + PyThreadState *tstate = _PyThreadState_GET(); + assert(tstate != NULL); _PyInterpreterFrame *pyframe = _PyThreadState_GetFrame(tstate); while (pyframe) { @@ -364,7 +335,7 @@ traceback_new(void) traceback_t *traceback; _Py_hashtable_entry_t *entry; - assert(PyGILState_Check()); + _Py_AssertHoldsTstate(); /* get frames */ traceback = tracemalloc_traceback; @@ -440,7 +411,7 @@ tracemalloc_get_traces_table(unsigned int domain) static void -tracemalloc_remove_trace(unsigned int domain, uintptr_t ptr) +tracemalloc_remove_trace_unlocked(unsigned int domain, uintptr_t ptr) { assert(tracemalloc_config.tracing); @@ -459,12 +430,12 @@ tracemalloc_remove_trace(unsigned int domain, uintptr_t ptr) } #define REMOVE_TRACE(ptr) \ - tracemalloc_remove_trace(DEFAULT_DOMAIN, (uintptr_t)(ptr)) + tracemalloc_remove_trace_unlocked(DEFAULT_DOMAIN, (uintptr_t)(ptr)) static int -tracemalloc_add_trace(unsigned int domain, uintptr_t ptr, - size_t size) +tracemalloc_add_trace_unlocked(unsigned int domain, uintptr_t ptr, + size_t size) { assert(tracemalloc_config.tracing); @@ -519,82 +490,147 @@ tracemalloc_add_trace(unsigned int domain, uintptr_t ptr, } #define ADD_TRACE(ptr, size) \ - tracemalloc_add_trace(DEFAULT_DOMAIN, (uintptr_t)(ptr), size) + tracemalloc_add_trace_unlocked(DEFAULT_DOMAIN, (uintptr_t)(ptr), size) static void* -tracemalloc_alloc(int use_calloc, void *ctx, size_t nelem, size_t elsize) +tracemalloc_alloc(int need_gil, int use_calloc, + void *ctx, size_t nelem, size_t elsize) { - PyMemAllocatorEx *alloc = (PyMemAllocatorEx *)ctx; - void *ptr; - assert(elsize == 0 || nelem <= SIZE_MAX / elsize); - if (use_calloc) + int reentrant = get_reentrant(); + + // Ignore reentrant call. + // + // For example, PyObject_Malloc() calls + // PyMem_Malloc() for allocations larger than 512 bytes: don't trace the + // same memory allocation twice. + // + // If reentrant calls are not ignored, PyGILState_Ensure() can call + // PyMem_RawMalloc() which would call PyGILState_Ensure() again in a loop.
+ if (!reentrant) { + set_reentrant(1); + } + + PyMemAllocatorEx *alloc = (PyMemAllocatorEx *)ctx; + void *ptr; + if (use_calloc) { ptr = alloc->calloc(alloc->ctx, nelem, elsize); - else + } + else { ptr = alloc->malloc(alloc->ctx, nelem * elsize); - if (ptr == NULL) - return NULL; + } + + if (ptr == NULL) { + goto done; + } + if (reentrant) { + goto done; + } + PyGILState_STATE gil_state; + if (need_gil) { + gil_state = PyGILState_Ensure(); + } TABLES_LOCK(); - if (ADD_TRACE(ptr, nelem * elsize) < 0) { - /* Failed to allocate a trace for the new memory block */ - TABLES_UNLOCK(); - alloc->free(alloc->ctx, ptr); - return NULL; + + if (tracemalloc_config.tracing) { + if (ADD_TRACE(ptr, nelem * elsize) < 0) { + // Failed to allocate a trace for the new memory block + alloc->free(alloc->ctx, ptr); + ptr = NULL; + } } + // else: gh-128679: tracemalloc.stop() was called by another thread + TABLES_UNLOCK(); + if (need_gil) { + PyGILState_Release(gil_state); + } + +done: + if (!reentrant) { + set_reentrant(0); + } return ptr; } static void* -tracemalloc_realloc(void *ctx, void *ptr, size_t new_size) +tracemalloc_realloc(int need_gil, void *ctx, void *ptr, size_t new_size) { + int reentrant = get_reentrant(); + + // Ignore reentrant call. PyObject_Realloc() calls PyMem_Realloc() for + // allocations larger than 512 bytes: don't trace the same memory block + // twice. + if (!reentrant) { + set_reentrant(1); + } + PyMemAllocatorEx *alloc = (PyMemAllocatorEx *)ctx; - void *ptr2; + void *ptr2 = alloc->realloc(alloc->ctx, ptr, new_size); - ptr2 = alloc->realloc(alloc->ctx, ptr, new_size); - if (ptr2 == NULL) - return NULL; + if (ptr2 == NULL) { + goto done; + } + if (reentrant) { + goto done; + } - if (ptr != NULL) { - /* an existing memory block has been resized */ + PyGILState_STATE gil_state; + if (need_gil) { + gil_state = PyGILState_Ensure(); + } + TABLES_LOCK(); - TABLES_LOCK(); + if (!tracemalloc_config.tracing) { + // gh-128679: tracemalloc.stop() was called by another thread + goto unlock; + } + + if (ptr != NULL) { + // An existing memory block has been resized - /* tracemalloc_add_trace() updates the trace if there is already - a trace at address ptr2 */ + // tracemalloc_add_trace_unlocked() updates the trace if there is + // already a trace at address ptr2. if (ptr2 != ptr) { REMOVE_TRACE(ptr); } if (ADD_TRACE(ptr2, new_size) < 0) { - /* Memory allocation failed. The error cannot be reported to - the caller, because realloc() may already have shrunk the - memory block and so removed bytes. - - This case is very unlikely: a hash entry has just been - released, so the hash table should have at least one free entry. - - The GIL and the table lock ensures that only one thread is - allocating memory. */ + // Memory allocation failed. The error cannot be reported to the + // caller, because realloc() may already have shrunk the memory block + // and so removed bytes. + // + // This case is very unlikely: a hash entry has just been released, + // so the hash table should have at least one free entry. + // + // The GIL and the table lock ensure that only one thread is + // allocating memory.
Py_FatalError("tracemalloc_realloc() failed to allocate a trace"); } - TABLES_UNLOCK(); } else { - /* new allocation */ + // New allocation - TABLES_LOCK(); if (ADD_TRACE(ptr2, new_size) < 0) { - /* Failed to allocate a trace for the new memory block */ - TABLES_UNLOCK(); + // Failed to allocate a trace for the new memory block alloc->free(alloc->ctx, ptr2); - return NULL; + ptr2 = NULL; } - TABLES_UNLOCK(); + } + +unlock: + TABLES_UNLOCK(); + if (need_gil) { + PyGILState_Release(gil_state); + } + +done: + if (!reentrant) { + set_reentrant(0); } return ptr2; } @@ -603,170 +639,68 @@ tracemalloc_realloc(void *ctx, void *ptr, size_t new_size) static void tracemalloc_free(void *ctx, void *ptr) { - PyMemAllocatorEx *alloc = (PyMemAllocatorEx *)ctx; - - if (ptr == NULL) + if (ptr == NULL) { return; + } - /* GIL cannot be locked in PyMem_RawFree() because it would introduce - a deadlock in _PyThreadState_DeleteCurrent(). */ - + PyMemAllocatorEx *alloc = (PyMemAllocatorEx *)ctx; alloc->free(alloc->ctx, ptr); - TABLES_LOCK(); - REMOVE_TRACE(ptr); - TABLES_UNLOCK(); -} - - -static void* -tracemalloc_alloc_gil(int use_calloc, void *ctx, size_t nelem, size_t elsize) -{ - void *ptr; - if (get_reentrant()) { - PyMemAllocatorEx *alloc = (PyMemAllocatorEx *)ctx; - if (use_calloc) - return alloc->calloc(alloc->ctx, nelem, elsize); - else - return alloc->malloc(alloc->ctx, nelem * elsize); + return; } - /* Ignore reentrant call. PyObjet_Malloc() calls PyMem_Malloc() for - allocations larger than 512 bytes, don't trace the same memory - allocation twice. */ - set_reentrant(1); + TABLES_LOCK(); - ptr = tracemalloc_alloc(use_calloc, ctx, nelem, elsize); + if (tracemalloc_config.tracing) { + REMOVE_TRACE(ptr); + } + // else: gh-128679: tracemalloc.stop() was called by another thread - set_reentrant(0); - return ptr; + TABLES_UNLOCK(); } static void* tracemalloc_malloc_gil(void *ctx, size_t size) { - return tracemalloc_alloc_gil(0, ctx, 1, size); + return tracemalloc_alloc(0, 0, ctx, 1, size); } static void* tracemalloc_calloc_gil(void *ctx, size_t nelem, size_t elsize) { - return tracemalloc_alloc_gil(1, ctx, nelem, elsize); + return tracemalloc_alloc(0, 1, ctx, nelem, elsize); } static void* tracemalloc_realloc_gil(void *ctx, void *ptr, size_t new_size) { - void *ptr2; - - if (get_reentrant()) { - /* Reentrant call to PyMem_Realloc() and PyMem_RawRealloc(). - Example: PyMem_RawRealloc() is called internally by pymalloc - (_PyObject_Malloc() and _PyObject_Realloc()) to allocate a new - arena (new_arena()). */ - PyMemAllocatorEx *alloc = (PyMemAllocatorEx *)ctx; - - ptr2 = alloc->realloc(alloc->ctx, ptr, new_size); - if (ptr2 != NULL && ptr != NULL) { - TABLES_LOCK(); - REMOVE_TRACE(ptr); - TABLES_UNLOCK(); - } - return ptr2; - } - - /* Ignore reentrant call. PyObjet_Realloc() calls PyMem_Realloc() for - allocations larger than 512 bytes. Don't trace the same memory - allocation twice. */ - set_reentrant(1); - - ptr2 = tracemalloc_realloc(ctx, ptr, new_size); - - set_reentrant(0); - return ptr2; -} - - -#ifdef TRACE_RAW_MALLOC -static void* -tracemalloc_raw_alloc(int use_calloc, void *ctx, size_t nelem, size_t elsize) -{ - PyGILState_STATE gil_state; - void *ptr; - - if (get_reentrant()) { - PyMemAllocatorEx *alloc = (PyMemAllocatorEx *)ctx; - if (use_calloc) - return alloc->calloc(alloc->ctx, nelem, elsize); - else - return alloc->malloc(alloc->ctx, nelem * elsize); - } - - /* Ignore reentrant call. 
PyGILState_Ensure() may call PyMem_RawMalloc() - indirectly which would call PyGILState_Ensure() if reentrant are not - disabled. */ - set_reentrant(1); - - gil_state = PyGILState_Ensure(); - ptr = tracemalloc_alloc(use_calloc, ctx, nelem, elsize); - PyGILState_Release(gil_state); - - set_reentrant(0); - return ptr; + return tracemalloc_realloc(0, ctx, ptr, new_size); } static void* tracemalloc_raw_malloc(void *ctx, size_t size) { - return tracemalloc_raw_alloc(0, ctx, 1, size); + return tracemalloc_alloc(1, 0, ctx, 1, size); } static void* tracemalloc_raw_calloc(void *ctx, size_t nelem, size_t elsize) { - return tracemalloc_raw_alloc(1, ctx, nelem, elsize); + return tracemalloc_alloc(1, 1, ctx, nelem, elsize); } static void* tracemalloc_raw_realloc(void *ctx, void *ptr, size_t new_size) { - PyGILState_STATE gil_state; - void *ptr2; - - if (get_reentrant()) { - /* Reentrant call to PyMem_RawRealloc(). */ - PyMemAllocatorEx *alloc = (PyMemAllocatorEx *)ctx; - - ptr2 = alloc->realloc(alloc->ctx, ptr, new_size); - - if (ptr2 != NULL && ptr != NULL) { - TABLES_LOCK(); - REMOVE_TRACE(ptr); - TABLES_UNLOCK(); - } - return ptr2; - } - - /* Ignore reentrant call. PyGILState_Ensure() may call PyMem_RawMalloc() - indirectly which would call PyGILState_Ensure() if reentrant calls are - not disabled. */ - set_reentrant(1); - - gil_state = PyGILState_Ensure(); - ptr2 = tracemalloc_realloc(ctx, ptr, new_size); - PyGILState_Release(gil_state); - - set_reentrant(0); - return ptr2; + return tracemalloc_realloc(1, ctx, ptr, new_size); } -#endif /* TRACE_RAW_MALLOC */ static void @@ -777,60 +711,36 @@ tracemalloc_clear_filename(void *value) } -/* reentrant flag must be set to call this function and GIL must be held */ static void -tracemalloc_clear_traces(void) +tracemalloc_clear_traces_unlocked(void) { - /* The GIL protects variables against concurrent access */ - assert(PyGILState_Check()); + // Clearing tracemalloc_filenames requires the GIL to call Py_DECREF() + _Py_AssertHoldsTstate(); + + set_reentrant(1); - TABLES_LOCK(); _Py_hashtable_clear(tracemalloc_traces); _Py_hashtable_clear(tracemalloc_domains); + _Py_hashtable_clear(tracemalloc_tracebacks); + _Py_hashtable_clear(tracemalloc_filenames); + tracemalloc_traced_memory = 0; tracemalloc_peak_traced_memory = 0; - TABLES_UNLOCK(); - - _Py_hashtable_clear(tracemalloc_tracebacks); - _Py_hashtable_clear(tracemalloc_filenames); + set_reentrant(0); } -int +PyStatus _PyTraceMalloc_Init(void) { - if (tracemalloc_config.initialized == TRACEMALLOC_FINALIZED) { - PyErr_SetString(PyExc_RuntimeError, - "the tracemalloc module has been unloaded"); - return -1; - } - - if (tracemalloc_config.initialized == TRACEMALLOC_INITIALIZED) - return 0; + assert(tracemalloc_config.initialized == TRACEMALLOC_NOT_INITIALIZED); PyMem_GetAllocator(PYMEM_DOMAIN_RAW, &allocators.raw); -#ifdef REENTRANT_THREADLOCAL if (PyThread_tss_create(&tracemalloc_reentrant_key) != 0) { -#ifdef MS_WINDOWS - PyErr_SetFromWindowsErr(0); -#else - PyErr_SetFromErrno(PyExc_OSError); -#endif - return -1; - } -#endif - -#if defined(TRACE_RAW_MALLOC) - if (tables_lock == NULL) { - tables_lock = PyThread_allocate_lock(); - if (tables_lock == NULL) { - PyErr_SetString(PyExc_RuntimeError, "cannot allocate lock"); - return -1; - } + return _PyStatus_NO_MEMORY(); } -#endif tracemalloc_filenames = hashtable_new(hashtable_hash_pyobject, hashtable_compare_unicode, @@ -844,9 +754,9 @@ _PyTraceMalloc_Init(void) tracemalloc_domains = tracemalloc_create_domains_table(); if (tracemalloc_filenames == NULL || 
tracemalloc_tracebacks == NULL - || tracemalloc_traces == NULL || tracemalloc_domains == NULL) { - PyErr_NoMemory(); - return -1; + || tracemalloc_traces == NULL || tracemalloc_domains == NULL) + { + return _PyStatus_NO_MEMORY(); } tracemalloc_empty_traceback.nframe = 1; @@ -857,7 +767,7 @@ _PyTraceMalloc_Init(void) tracemalloc_empty_traceback.hash = traceback_hash(&tracemalloc_empty_traceback); tracemalloc_config.initialized = TRACEMALLOC_INITIALIZED; - return 0; + return _PyStatus_OK(); } @@ -876,25 +786,13 @@ tracemalloc_deinit(void) _Py_hashtable_destroy(tracemalloc_tracebacks); _Py_hashtable_destroy(tracemalloc_filenames); -#if defined(TRACE_RAW_MALLOC) - if (tables_lock != NULL) { - PyThread_free_lock(tables_lock); - tables_lock = NULL; - } -#endif - -#ifdef REENTRANT_THREADLOCAL PyThread_tss_delete(&tracemalloc_reentrant_key); -#endif } int _PyTraceMalloc_Start(int max_nframe) { - PyMemAllocatorEx alloc; - size_t size; - if (max_nframe < 1 || (unsigned long) max_nframe > MAX_NFRAME) { PyErr_Format(PyExc_ValueError, "the number of frames must be in range [1; %lu]", @@ -902,23 +800,15 @@ _PyTraceMalloc_Start(int max_nframe) return -1; } - if (_PyTraceMalloc_Init() < 0) { - return -1; - } - - if (PyRefTracer_SetTracer(_PyTraceMalloc_TraceRef, NULL) < 0) { - return -1; - } - - if (tracemalloc_config.tracing) { - /* hook already installed: do nothing */ + if (_PyTraceMalloc_IsTracing()) { + /* hooks already installed: do nothing */ return 0; } tracemalloc_config.max_nframe = max_nframe; /* allocate a buffer to store a new traceback */ - size = TRACEBACK_SIZE(max_nframe); + size_t size = TRACEBACK_SIZE(max_nframe); assert(tracemalloc_traceback == NULL); tracemalloc_traceback = raw_malloc(size); if (tracemalloc_traceback == NULL) { @@ -926,7 +816,7 @@ _PyTraceMalloc_Start(int max_nframe) return -1; } -#ifdef TRACE_RAW_MALLOC + PyMemAllocatorEx alloc; alloc.malloc = tracemalloc_raw_malloc; alloc.calloc = tracemalloc_raw_calloc; alloc.realloc = tracemalloc_raw_realloc; @@ -935,7 +825,6 @@ _PyTraceMalloc_Start(int max_nframe) alloc.ctx = &allocators.raw; PyMem_GetAllocator(PYMEM_DOMAIN_RAW, &allocators.raw); PyMem_SetAllocator(PYMEM_DOMAIN_RAW, &alloc); -#endif alloc.malloc = tracemalloc_malloc_gil; alloc.calloc = tracemalloc_calloc_gil; @@ -950,8 +839,14 @@ _PyTraceMalloc_Start(int max_nframe) PyMem_GetAllocator(PYMEM_DOMAIN_OBJ, &allocators.obj); PyMem_SetAllocator(PYMEM_DOMAIN_OBJ, &alloc); + if (PyRefTracer_SetTracer(_PyTraceMalloc_TraceRef, NULL) < 0) { + return -1; + } + /* everything is ready: start tracing Python memory allocations */ + TABLES_LOCK(); tracemalloc_config.tracing = 1; + TABLES_UNLOCK(); return 0; } @@ -960,24 +855,30 @@ _PyTraceMalloc_Start(int max_nframe) void _PyTraceMalloc_Stop(void) { - if (!tracemalloc_config.tracing) - return; + TABLES_LOCK(); + + if (!tracemalloc_config.tracing) { + goto done; + } /* stop tracing Python memory allocations */ tracemalloc_config.tracing = 0; /* unregister the hook on memory allocators */ -#ifdef TRACE_RAW_MALLOC PyMem_SetAllocator(PYMEM_DOMAIN_RAW, &allocators.raw); -#endif PyMem_SetAllocator(PYMEM_DOMAIN_MEM, &allocators.mem); PyMem_SetAllocator(PYMEM_DOMAIN_OBJ, &allocators.obj); - tracemalloc_clear_traces(); + tracemalloc_clear_traces_unlocked(); /* release memory */ raw_free(tracemalloc_traceback); tracemalloc_traceback = NULL; + + (void)PyRefTracer_SetTracer(NULL, NULL); + +done: + TABLES_UNLOCK(); } @@ -985,15 +886,16 @@ _PyTraceMalloc_Stop(void) static PyObject* frame_to_pyobject(frame_t *frame) { - PyObject *frame_obj, 
*lineno_obj; + assert(get_reentrant()); - frame_obj = PyTuple_New(2); - if (frame_obj == NULL) + PyObject *frame_obj = PyTuple_New(2); + if (frame_obj == NULL) { return NULL; + } PyTuple_SET_ITEM(frame_obj, 0, Py_NewRef(frame->filename)); - lineno_obj = PyLong_FromUnsignedLong(frame->lineno); + PyObject *lineno_obj = PyLong_FromUnsignedLong(frame->lineno); if (lineno_obj == NULL) { Py_DECREF(frame_obj); return NULL; @@ -1008,7 +910,6 @@ static PyObject* traceback_to_pyobject(traceback_t *traceback, _Py_hashtable_t *intern_table) { PyObject *frames; - if (intern_table != NULL) { frames = _Py_hashtable_get(intern_table, (const void *)traceback); if (frames) { @@ -1017,8 +918,9 @@ traceback_to_pyobject(traceback_t *traceback, _Py_hashtable_t *intern_table) } frames = PyTuple_New(traceback->nframe); - if (frames == NULL) + if (frames == NULL) { return NULL; + } for (int i=0; i < traceback->nframe; i++) { PyObject *frame = frame_to_pyobject(&traceback->frames[i]); @@ -1046,14 +948,14 @@ static PyObject* trace_to_pyobject(unsigned int domain, const trace_t *trace, _Py_hashtable_t *intern_tracebacks) { - PyObject *trace_obj = NULL; - PyObject *obj; + assert(get_reentrant()); - trace_obj = PyTuple_New(4); - if (trace_obj == NULL) + PyObject *trace_obj = PyTuple_New(4); + if (trace_obj == NULL) { return NULL; + } - obj = PyLong_FromSize_t(domain); + PyObject *obj = PyLong_FromSize_t(domain); if (obj == NULL) { Py_DECREF(trace_obj); return NULL; @@ -1100,7 +1002,6 @@ tracemalloc_copy_trace(_Py_hashtable_t *traces, void *user_data) { _Py_hashtable_t *traces2 = (_Py_hashtable_t *)user_data; - trace_t *trace = (trace_t *)value; trace_t *trace2 = raw_malloc(sizeof(trace_t)); @@ -1141,7 +1042,6 @@ tracemalloc_copy_domain(_Py_hashtable_t *domains, void *user_data) { _Py_hashtable_t *domains2 = (_Py_hashtable_t *)user_data; - unsigned int domain = (unsigned int)FROM_PTR(key); _Py_hashtable_t *traces = (_Py_hashtable_t *)value; @@ -1182,7 +1082,6 @@ tracemalloc_get_traces_fill(_Py_hashtable_t *traces, void *user_data) { get_traces_t *get_traces = user_data; - const trace_t *trace = (const trace_t *)value; PyObject *tuple = trace_to_pyobject(get_traces->domain, trace, @@ -1196,7 +1095,6 @@ tracemalloc_get_traces_fill(_Py_hashtable_t *traces, if (res < 0) { return 1; } - return 0; } @@ -1207,7 +1105,6 @@ tracemalloc_get_traces_domain(_Py_hashtable_t *domains, void *user_data) { get_traces_t *get_traces = user_data; - unsigned int domain = (unsigned int)FROM_PTR(key); _Py_hashtable_t *traces = (_Py_hashtable_t *)value; @@ -1227,27 +1124,21 @@ tracemalloc_pyobject_decref(void *value) static traceback_t* -tracemalloc_get_traceback(unsigned int domain, uintptr_t ptr) +tracemalloc_get_traceback_unlocked(unsigned int domain, uintptr_t ptr) { - - if (!tracemalloc_config.tracing) + if (!tracemalloc_config.tracing) { return NULL; + } - trace_t *trace; - TABLES_LOCK(); _Py_hashtable_t *traces = tracemalloc_get_traces_table(domain); - if (traces) { - trace = _Py_hashtable_get(traces, TO_PTR(ptr)); - } - else { - trace = NULL; + if (!traces) { + return NULL; } - TABLES_UNLOCK(); + trace_t *trace = _Py_hashtable_get(traces, TO_PTR(ptr)); if (!trace) { return NULL; } - return trace->traceback; } @@ -1269,24 +1160,28 @@ _PyMem_DumpFrame(int fd, frame_t * frame) void _PyMem_DumpTraceback(int fd, const void *ptr) { - traceback_t *traceback; - int i; - + TABLES_LOCK(); if (!tracemalloc_config.tracing) { PUTS(fd, "Enable tracemalloc to get the memory block " "allocation traceback\n\n"); - return; + goto done; } - traceback = 
tracemalloc_get_traceback(DEFAULT_DOMAIN, (uintptr_t)ptr); - if (traceback == NULL) - return; + traceback_t *traceback; + traceback = tracemalloc_get_traceback_unlocked(DEFAULT_DOMAIN, + (uintptr_t)ptr); + if (traceback == NULL) { + goto done; + } PUTS(fd, "Memory block allocated at (most recent call first):\n"); - for (i=0; i < traceback->nframe; i++) { + for (int i=0; i < traceback->nframe; i++) { _PyMem_DumpFrame(fd, &traceback->frames[i]); } PUTS(fd, "\n"); + +done: + TABLES_UNLOCK(); } #undef PUTS @@ -1307,45 +1202,48 @@ int PyTraceMalloc_Track(unsigned int domain, uintptr_t ptr, size_t size) { - int res; - PyGILState_STATE gil_state; + PyGILState_STATE gil_state = PyGILState_Ensure(); + TABLES_LOCK(); - if (!tracemalloc_config.tracing) { + int result; + if (tracemalloc_config.tracing) { + result = tracemalloc_add_trace_unlocked(domain, ptr, size); + } + else { /* tracemalloc is not tracing: do nothing */ - return -2; + result = -2; } - gil_state = PyGILState_Ensure(); - - TABLES_LOCK(); - res = tracemalloc_add_trace(domain, ptr, size); TABLES_UNLOCK(); - PyGILState_Release(gil_state); - return res; + return result; } int PyTraceMalloc_Untrack(unsigned int domain, uintptr_t ptr) { - if (!tracemalloc_config.tracing) { + TABLES_LOCK(); + + int result; + if (tracemalloc_config.tracing) { + tracemalloc_remove_trace_unlocked(domain, ptr); + result = 0; + } + else { /* tracemalloc is not tracing: do nothing */ - return -2; + result = -2; } - TABLES_LOCK(); - tracemalloc_remove_trace(domain, ptr); TABLES_UNLOCK(); - - return 0; + return result; } void _PyTraceMalloc_Fini(void) { - assert(PyGILState_Check()); + _Py_AssertHoldsTstate(); tracemalloc_deinit(); } @@ -1355,87 +1253,102 @@ _PyTraceMalloc_Fini(void) Do nothing if tracemalloc is not tracing memory allocations or if the object memory block is not already traced. 
*/ -int -_PyTraceMalloc_TraceRef(PyObject *op, PyRefTracerEvent event, void* Py_UNUSED(ignore)) +static int +_PyTraceMalloc_TraceRef(PyObject *op, PyRefTracerEvent event, + void* Py_UNUSED(ignore)) { if (event != PyRefTracer_CREATE) { return 0; } + if (get_reentrant()) { + return 0; + } - assert(PyGILState_Check()); + _Py_AssertHoldsTstate(); + TABLES_LOCK(); if (!tracemalloc_config.tracing) { - /* tracemalloc is not tracing: do nothing */ - return -1; + goto done; } PyTypeObject *type = Py_TYPE(op); const size_t presize = _PyType_PreHeaderSize(type); uintptr_t ptr = (uintptr_t)((char *)op - presize); - int res = -1; - - TABLES_LOCK(); trace_t *trace = _Py_hashtable_get(tracemalloc_traces, TO_PTR(ptr)); if (trace != NULL) { /* update the traceback of the memory block */ traceback_t *traceback = traceback_new(); if (traceback != NULL) { trace->traceback = traceback; - res = 0; } } /* else: cannot track the object, its memory block size is unknown */ - TABLES_UNLOCK(); - return res; +done: + TABLES_UNLOCK(); + return 0; } PyObject* _PyTraceMalloc_GetTraceback(unsigned int domain, uintptr_t ptr) { - traceback_t *traceback; + TABLES_LOCK(); - traceback = tracemalloc_get_traceback(domain, ptr); - if (traceback == NULL) - Py_RETURN_NONE; + traceback_t *traceback = tracemalloc_get_traceback_unlocked(domain, ptr); + PyObject *result; + if (traceback) { + set_reentrant(1); + result = traceback_to_pyobject(traceback, NULL); + set_reentrant(0); + } + else { + result = Py_NewRef(Py_None); + } - return traceback_to_pyobject(traceback, NULL); + TABLES_UNLOCK(); + return result; } int _PyTraceMalloc_IsTracing(void) { - return tracemalloc_config.tracing; + TABLES_LOCK(); + int tracing = tracemalloc_config.tracing; + TABLES_UNLOCK(); + return tracing; } void _PyTraceMalloc_ClearTraces(void) { - - if (!tracemalloc_config.tracing) { - return; + TABLES_LOCK(); + if (tracemalloc_config.tracing) { + tracemalloc_clear_traces_unlocked(); } - set_reentrant(1); - tracemalloc_clear_traces(); - set_reentrant(0); + TABLES_UNLOCK(); } PyObject * _PyTraceMalloc_GetTraces(void) { + TABLES_LOCK(); + set_reentrant(1); + get_traces_t get_traces; get_traces.domain = DEFAULT_DOMAIN; get_traces.traces = NULL; get_traces.domains = NULL; get_traces.tracebacks = NULL; get_traces.list = PyList_New(0); - if (get_traces.list == NULL) - goto error; + if (get_traces.list == NULL) { + goto finally; + } - if (!tracemalloc_config.tracing) - return get_traces.list; + if (!tracemalloc_config.tracing) { + goto finally; + } /* the traceback hash table is used temporarily to intern traceback tuple of (filename, lineno) tuples */ @@ -1449,24 +1362,17 @@ _PyTraceMalloc_GetTraces(void) // Copy all traces so tracemalloc_get_traces_fill() doesn't have to disable // temporarily tracemalloc which would impact other threads and so would // miss allocations while get_traces() is called. 
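A standalone sketch of the locking discipline adopted throughout these tracemalloc hunks: every entry point takes the tables lock first and re-checks the tracing flag under it, because tracemalloc.stop() may run concurrently in another thread (gh-128679). The names and the plain pthread mutex below are illustrative stand-ins, not the _PyRuntime structures.

#include <pthread.h>
#include <stdio.h>

/* Lock first, then re-check `tracing`: stop() may have won the race. */
static pthread_mutex_t tables_lock = PTHREAD_MUTEX_INITIALIZER;
static int tracing = 0;
static size_t traced_bytes = 0;

static void tracing_start(void) {
    pthread_mutex_lock(&tables_lock);
    tracing = 1;
    pthread_mutex_unlock(&tables_lock);
}

static void tracing_stop(void) {
    pthread_mutex_lock(&tables_lock);
    tracing = 0;
    traced_bytes = 0;          /* clear state while still holding the lock */
    pthread_mutex_unlock(&tables_lock);
}

static void track(size_t size) {
    pthread_mutex_lock(&tables_lock);
    if (tracing) {             /* re-check under the lock */
        traced_bytes += size;
    }
    pthread_mutex_unlock(&tables_lock);
}

int main(void) {
    tracing_start();
    track(128);
    tracing_stop();
    track(64);                 /* ignored: tracing already stopped */
    pthread_mutex_lock(&tables_lock);
    printf("traced: %zu\n", traced_bytes);   /* 0 after stop() */
    pthread_mutex_unlock(&tables_lock);
    return 0;
}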
- TABLES_LOCK(); get_traces.traces = tracemalloc_copy_traces(tracemalloc_traces); - TABLES_UNLOCK(); - if (get_traces.traces == NULL) { goto no_memory; } - TABLES_LOCK(); get_traces.domains = tracemalloc_copy_domains(tracemalloc_domains); - TABLES_UNLOCK(); - if (get_traces.domains == NULL) { goto no_memory; } // Convert traces to a list of tuples - set_reentrant(1); int err = _Py_hashtable_foreach(get_traces.traces, tracemalloc_get_traces_fill, &get_traces); @@ -1475,20 +1381,22 @@ _PyTraceMalloc_GetTraces(void) tracemalloc_get_traces_domain, &get_traces); } - set_reentrant(0); + if (err) { - goto error; + Py_CLEAR(get_traces.list); + goto finally; } - goto finally; no_memory: PyErr_NoMemory(); - -error: Py_CLEAR(get_traces.list); + goto finally; finally: + set_reentrant(0); + TABLES_UNLOCK(); + if (get_traces.tracebacks != NULL) { _Py_hashtable_destroy(get_traces.tracebacks); } @@ -1506,37 +1414,33 @@ PyObject * _PyTraceMalloc_GetObjectTraceback(PyObject *obj) /*[clinic end generated code: output=41ee0553a658b0aa input=29495f1b21c53212]*/ { - PyTypeObject *type; - traceback_t *traceback; - - type = Py_TYPE(obj); + PyTypeObject *type = Py_TYPE(obj); const size_t presize = _PyType_PreHeaderSize(type); uintptr_t ptr = (uintptr_t)((char *)obj - presize); - - traceback = tracemalloc_get_traceback(DEFAULT_DOMAIN, ptr); - if (traceback == NULL) { - Py_RETURN_NONE; - } - - return traceback_to_pyobject(traceback, NULL); + return _PyTraceMalloc_GetTraceback(DEFAULT_DOMAIN, ptr); } -int _PyTraceMalloc_GetTracebackLimit(void) { +int _PyTraceMalloc_GetTracebackLimit(void) +{ return tracemalloc_config.max_nframe; } size_t -_PyTraceMalloc_GetMemory(void) { - +_PyTraceMalloc_GetMemory(void) +{ + TABLES_LOCK(); size_t size; + if (tracemalloc_config.tracing) { + size = _Py_hashtable_size(tracemalloc_tracebacks); + size += _Py_hashtable_size(tracemalloc_filenames); - size = _Py_hashtable_size(tracemalloc_tracebacks); - size += _Py_hashtable_size(tracemalloc_filenames); - - TABLES_LOCK(); - size += _Py_hashtable_size(tracemalloc_traces); - _Py_hashtable_foreach(tracemalloc_domains, - tracemalloc_get_tracemalloc_memory_cb, &size); + size += _Py_hashtable_size(tracemalloc_traces); + _Py_hashtable_foreach(tracemalloc_domains, + tracemalloc_get_tracemalloc_memory_cb, &size); + } + else { + size = 0; + } TABLES_UNLOCK(); return size; } @@ -1545,26 +1449,27 @@ _PyTraceMalloc_GetMemory(void) { PyObject * _PyTraceMalloc_GetTracedMemory(void) { - Py_ssize_t size, peak_size; - - if (!tracemalloc_config.tracing) - return Py_BuildValue("ii", 0, 0); - TABLES_LOCK(); - size = tracemalloc_traced_memory; - peak_size = tracemalloc_peak_traced_memory; + Py_ssize_t traced, peak; + if (tracemalloc_config.tracing) { + traced = tracemalloc_traced_memory; + peak = tracemalloc_peak_traced_memory; + } + else { + traced = 0; + peak = 0; + } TABLES_UNLOCK(); - return Py_BuildValue("nn", size, peak_size); + return Py_BuildValue("nn", traced, peak); } void _PyTraceMalloc_ResetPeak(void) { - if (!tracemalloc_config.tracing) { - return; - } TABLES_LOCK(); - tracemalloc_peak_traced_memory = tracemalloc_traced_memory; + if (tracemalloc_config.tracing) { + tracemalloc_peak_traced_memory = tracemalloc_traced_memory; + } TABLES_UNLOCK(); } diff --git a/Python/uniqueid.c b/Python/uniqueid.c index b9f30713feeb57..64c3e6cfbbe825 100644 --- a/Python/uniqueid.c +++ b/Python/uniqueid.c @@ -86,7 +86,7 @@ _PyObject_AssignUniqueId(PyObject *obj) if (pool->freelist == NULL) { if (resize_interp_type_id_pool(pool) < 0) { UNLOCK_POOL(pool); - return -1; + 
return _Py_INVALID_UNIQUE_ID; } } @@ -94,7 +94,9 @@ _PyObject_AssignUniqueId(PyObject *obj) pool->freelist = entry->next; entry->obj = obj; _PyObject_SetDeferredRefcount(obj); - Py_ssize_t unique_id = (entry - pool->table); + // The unique id is one plus the index of the entry in the table. + Py_ssize_t unique_id = (entry - pool->table) + 1; + assert(unique_id > 0); UNLOCK_POOL(pool); return unique_id; } @@ -106,8 +108,9 @@ _PyObject_ReleaseUniqueId(Py_ssize_t unique_id) struct _Py_unique_id_pool *pool = &interp->unique_ids; LOCK_POOL(pool); - assert(unique_id >= 0 && unique_id < pool->size); - _Py_unique_id_entry *entry = &pool->table[unique_id]; + assert(unique_id > 0 && unique_id <= pool->size); + Py_ssize_t idx = unique_id - 1; + _Py_unique_id_entry *entry = &pool->table[idx]; entry->next = pool->freelist; pool->freelist = entry; UNLOCK_POOL(pool); @@ -116,18 +119,18 @@ _PyObject_ReleaseUniqueId(Py_ssize_t unique_id) static Py_ssize_t clear_unique_id(PyObject *obj) { - Py_ssize_t id = -1; + Py_ssize_t id = _Py_INVALID_UNIQUE_ID; if (PyType_Check(obj)) { if (PyType_HasFeature((PyTypeObject *)obj, Py_TPFLAGS_HEAPTYPE)) { PyHeapTypeObject *ht = (PyHeapTypeObject *)obj; id = ht->unique_id; - ht->unique_id = -1; + ht->unique_id = _Py_INVALID_UNIQUE_ID; } } else if (PyCode_Check(obj)) { PyCodeObject *co = (PyCodeObject *)obj; id = co->_co_unique_id; - co->_co_unique_id = -1; + co->_co_unique_id = _Py_INVALID_UNIQUE_ID; } else if (PyDict_Check(obj)) { PyDictObject *mp = (PyDictObject *)obj; @@ -141,23 +144,23 @@ void _PyObject_DisablePerThreadRefcounting(PyObject *obj) { Py_ssize_t id = clear_unique_id(obj); - if (id >= 0) { + if (id != _Py_INVALID_UNIQUE_ID) { _PyObject_ReleaseUniqueId(id); } } void -_PyObject_ThreadIncrefSlow(PyObject *obj, Py_ssize_t unique_id) +_PyObject_ThreadIncrefSlow(PyObject *obj, size_t idx) { _PyThreadStateImpl *tstate = (_PyThreadStateImpl *)_PyThreadState_GET(); - if (unique_id < 0 || resize_local_refcounts(tstate) < 0) { + if (((Py_ssize_t)idx) < 0 || resize_local_refcounts(tstate) < 0) { // just incref the object directly. Py_INCREF(obj); return; } - assert(unique_id < tstate->refcounts.size); - tstate->refcounts.values[unique_id]++; + assert(idx < (size_t)tstate->refcounts.size); + tstate->refcounts.values[idx]++; #ifdef Py_REF_DEBUG _Py_IncRefTotal((PyThreadState *)tstate); #endif @@ -217,7 +220,7 @@ _PyObject_FinalizeUniqueIdPool(PyInterpreterState *interp) if (obj != NULL) { Py_ssize_t id = clear_unique_id(obj); (void)id; - assert(id == i); + assert(id == i + 1); } } PyMem_Free(pool->table); diff --git a/README.rst b/README.rst index 02776205e6dcc9..6f74bc3f9ed7ef 100644 --- a/README.rst +++ b/README.rst @@ -1,4 +1,4 @@ -This is Python version 3.14.0 alpha 3 +This is Python version 3.14.0 alpha 4 ===================================== .. 
image:: https://github.com/python/cpython/actions/workflows/build.yml/badge.svg?branch=main&event=push diff --git a/Tools/build/mypy.ini b/Tools/build/mypy.ini index cf1dac7fde5ac5..0e5d6e874a72e5 100644 --- a/Tools/build/mypy.ini +++ b/Tools/build/mypy.ini @@ -8,6 +8,6 @@ python_version = 3.10 # ...And be strict: strict = True -strict_concatenate = True +extra_checks = True enable_error_code = ignore-without-code,redundant-expr,truthy-bool,possibly-undefined warn_unreachable = True diff --git a/Tools/c-analyzer/c_analyzer/datafiles.py b/Tools/c-analyzer/c_analyzer/datafiles.py index d5db3bd3ed74ac..79c201a5d3b92c 100644 --- a/Tools/c-analyzer/c_analyzer/datafiles.py +++ b/Tools/c-analyzer/c_analyzer/datafiles.py @@ -104,7 +104,12 @@ def _iter_ignored(infile, relroot): for v in varidinfo) if reason in bogus: reason = None - varid = _info.DeclID.from_row(varidinfo) + try: + varid = _info.DeclID.from_row(varidinfo) + except BaseException as e: + e.add_note(f"Error occurred when processing row {varidinfo} in {infile}.") + e.add_note(f"Could it be that you added a row which is not tab-delimited?") + raise e varid = varid.fix_filename(relroot, formatted=False, fixroot=False) yield varid, reason diff --git a/Tools/c-analyzer/cpython/globals-to-fix.tsv b/Tools/c-analyzer/cpython/globals-to-fix.tsv index a74779803228c2..54954cfb5f83ff 100644 --- a/Tools/c-analyzer/cpython/globals-to-fix.tsv +++ b/Tools/c-analyzer/cpython/globals-to-fix.tsv @@ -407,7 +407,8 @@ Modules/_tkinter.c - trbInCmd - ## other Include/datetime.h - PyDateTimeAPI - -Modules/_ctypes/cfield.c _ctypes_get_fielddesc initialized - +Modules/_ctypes/cfield.c _ctypes_init_fielddesc initialized - +Modules/_ctypes/cfield.c - formattable - Modules/_ctypes/malloc_closure.c - _pagesize - Modules/_cursesmodule.c - curses_module_loaded - Modules/_cursesmodule.c - curses_initscr_called - @@ -422,7 +423,6 @@ Modules/readline.c - libedit_history_start - ##----------------------- ## state -Modules/_ctypes/cfield.c - formattable - Modules/_ctypes/malloc_closure.c - free_list - Modules/_curses_panel.c - lop - Modules/_ssl/debughelpers.c _PySSL_keylog_callback lock - diff --git a/Tools/c-analyzer/cpython/ignored.tsv b/Tools/c-analyzer/cpython/ignored.tsv index c8c30a7985aa2e..be3ded9f07ef8a 100644 --- a/Tools/c-analyzer/cpython/ignored.tsv +++ b/Tools/c-analyzer/cpython/ignored.tsv @@ -53,6 +53,9 @@ Python/pyhash.c - _Py_HashSecret - ## thread-safe hashtable (internal locks) Python/parking_lot.c - buckets - +## data needed for introspecting asyncio state from debuggers and profilers +Modules/_asynciomodule.c - AsyncioDebug - + ################################## ## state tied to Py_Main() @@ -378,16 +381,15 @@ Python/pylifecycle.c - INTERPRETER_TRAMPOLINE_CODEDEF - Python/pystate.c - initial - Python/specialize.c - adaptive_opcodes - Python/specialize.c - cache_requirements - +Python/specialize.c - compactlongs_specs - +Python/specialize.c - float_compactlong_specs - +Python/specialize.c - compactlong_float_specs - Python/stdlib_module_names.h - _Py_stdlib_module_names - Python/sysmodule.c - perf_map_state - Python/sysmodule.c - _PySys_ImplCacheTag - Python/sysmodule.c - _PySys_ImplName - Python/sysmodule.c - whatstrings - -Python/optimizer.c - _PyDefaultOptimizer_Type - -Python/optimizer.c - _PyCounterExecutor_Type - -Python/optimizer.c - _PyCounterOptimizer_Type - Python/optimizer.c - _PyUOpExecutor_Type - -Python/optimizer.c - _PyUOpOptimizer_Type - Python/optimizer.c - _PyOptimizer_Default - Python/optimizer.c - _ColdExit_Type - 
Python/optimizer.c - Py_FatalErrorExecutor - @@ -444,6 +446,8 @@ Modules/_testcapi/exceptions.c - PyRecursingInfinitelyError_Type - Modules/_testcapi/heaptype.c - _testcapimodule - Modules/_testcapi/mem.c - FmData - Modules/_testcapi/mem.c - FmHook - +Modules/_testcapi/object.c - MyObject_dealloc_called - +Modules/_testcapi/object.c - MyType - Modules/_testcapi/structmember.c - test_structmembersType_OldAPI - Modules/_testcapi/watchers.c - g_dict_watch_events - Modules/_testcapi/watchers.c - g_dict_watchers_installed - diff --git a/Tools/cases_generator/analyzer.py b/Tools/cases_generator/analyzer.py index eca851e6de87ae..afb20b0330dd88 100644 --- a/Tools/cases_generator/analyzer.py +++ b/Tools/cases_generator/analyzer.py @@ -5,9 +5,17 @@ import re from typing import Optional +@dataclass +class EscapingCall: + start: lexer.Token + call: lexer.Token + end: lexer.Token + kills: lexer.Token | None + @dataclass class Properties: - escaping_calls: dict[lexer.Token, tuple[lexer.Token, lexer.Token]] + escaping_calls: dict[lexer.Token, EscapingCall] + escapes: bool error_with_pop: bool error_without_pop: bool deopts: bool @@ -27,6 +35,7 @@ class Properties: oparg_and_1: bool = False const_oparg: int = -1 needs_prev: bool = False + no_save_ip: bool = False def dump(self, indent: str) -> None: simple_properties = self.__dict__.copy() @@ -39,11 +48,12 @@ def dump(self, indent: str) -> None: @staticmethod def from_list(properties: list["Properties"]) -> "Properties": - escaping_calls: dict[lexer.Token, tuple[lexer.Token, lexer.Token]] = {} + escaping_calls: dict[lexer.Token, EscapingCall] = {} for p in properties: escaping_calls.update(p.escaping_calls) return Properties( escaping_calls=escaping_calls, + escapes = any(p.escapes for p in properties), error_with_pop=any(p.error_with_pop for p in properties), error_without_pop=any(p.error_without_pop for p in properties), deopts=any(p.deopts for p in properties), @@ -60,18 +70,16 @@ def from_list(properties: list["Properties"]) -> "Properties": side_exit=any(p.side_exit for p in properties), pure=all(p.pure for p in properties), needs_prev=any(p.needs_prev for p in properties), + no_save_ip=all(p.no_save_ip for p in properties), ) @property def infallible(self) -> bool: return not self.error_with_pop and not self.error_without_pop - @property - def escapes(self) -> bool: - return bool(self.escaping_calls) - SKIP_PROPERTIES = Properties( escaping_calls={}, + escapes=False, error_with_pop=False, error_without_pop=False, deopts=False, @@ -87,6 +95,7 @@ def escapes(self) -> bool: has_free=False, side_exit=False, pure=True, + no_save_ip=False, ) @@ -256,6 +265,12 @@ def is_super(self) -> bool: return False +@dataclass +class Label: + name: str + body: list[lexer.Token] + + @dataclass class PseudoInstruction: name: str @@ -289,6 +304,7 @@ class Analysis: uops: dict[str, Uop] families: dict[str, Family] pseudos: dict[str, PseudoInstruction] + labels: dict[str, Label] opmap: dict[str, int] have_arg: int min_instrumented: int @@ -321,6 +337,17 @@ def convert_stack_item( cond = replace_op_arg_1 return StackItem(item.name, item.type, cond, item.size) +def check_unused(stack: list[StackItem], input_names: dict[str, lexer.Token]) -> None: + "Unused items cannot be on the stack above used, non-peek items" + seen_unused = False + for item in reversed(stack): + if item.name == "unused": + seen_unused = True + elif item.peek: + break + elif seen_unused: + raise analysis_error(f"Cannot have used input '{item.name}' below an unused value on the stack", 
input_names[item.name]) + def analyze_stack( op: parser.InstDef | parser.Pseudo, replace_op_arg_1: str | None = None @@ -365,6 +392,7 @@ def analyze_stack( for output in outputs: if variable_used(op, output.name): output.used = True + check_unused(inputs, input_names) return StackEffect(inputs, outputs) @@ -384,7 +412,7 @@ def find_assignment_target(node: parser.InstDef, idx: int) -> list[lexer.Token]: """Find the tokens that make up the left-hand side of an assignment""" offset = 0 for tkn in reversed(node.block.tokens[: idx]): - if tkn.kind in {"SEMI", "LBRACE", "RBRACE"}: + if tkn.kind in {"SEMI", "LBRACE", "RBRACE", "CMACRO"}: return node.block.tokens[idx - offset : idx] offset += 1 return [] @@ -502,7 +530,6 @@ def has_error_with_pop(op: parser.InstDef) -> bool: variable_used(op, "ERROR_IF") or variable_used(op, "pop_1_error") or variable_used(op, "exception_unwind") - or variable_used(op, "resume_with_error") ) @@ -511,7 +538,6 @@ def has_error_without_pop(op: parser.InstDef) -> bool: variable_used(op, "ERROR_NO_POP") or variable_used(op, "pop_1_error") or variable_used(op, "exception_unwind") - or variable_used(op, "resume_with_error") ) @@ -541,7 +567,6 @@ def has_error_without_pop(op: parser.InstDef) -> bool: "PyStackRef_AsPyObjectNew", "PyStackRef_AsPyObjectSteal", "PyStackRef_CLEAR", - "PyStackRef_CLOSE", "PyStackRef_CLOSE_SPECIALIZED", "PyStackRef_DUP", "PyStackRef_False", @@ -565,7 +590,6 @@ def has_error_without_pop(op: parser.InstDef) -> bool: "PyUnicode_READ_CHAR", "Py_ARRAY_LENGTH", "Py_CLEAR", - "Py_DECREF", "Py_FatalError", "Py_INCREF", "Py_IS_TYPE", @@ -575,7 +599,6 @@ def has_error_without_pop(op: parser.InstDef) -> bool: "Py_TYPE", "Py_UNREACHABLE", "Py_Unicode_GET_LENGTH", - "Py_XDECREF", "_PyCode_CODE", "_PyDictValues_AddToInsertionOrder", "_PyErr_Occurred", @@ -590,12 +613,13 @@ def has_error_without_pop(op: parser.InstDef) -> bool: "_PyGen_GetGeneratorFromFrame", "_PyInterpreterState_GET", "_PyList_AppendTakeRef", - "_PyList_FromStackRefSteal", + "_PyList_FromStackRefStealOnSuccess", "_PyList_ITEMS", "_PyLong_Add", "_PyLong_CompactValue", "_PyLong_DigitCount", "_PyLong_IsCompact", + "_PyLong_IsNegative", "_PyLong_IsNonNegativeCompact", "_PyLong_IsZero", "_PyLong_Multiply", @@ -608,8 +632,7 @@ def has_error_without_pop(op: parser.InstDef) -> bool: "_PyObject_InlineValues", "_PyObject_ManagedDictPointer", "_PyThreadState_HasStackSpace", - "_PyTuple_FromArraySteal", - "_PyTuple_FromStackRefSteal", + "_PyTuple_FromStackRefStealOnSuccess", "_PyTuple_ITEMS", "_PyType_HasFeature", "_PyType_NewManagedObject", @@ -617,7 +640,6 @@ def has_error_without_pop(op: parser.InstDef) -> bool: "_PyUnicode_JoinArray", "_Py_CHECK_EMSCRIPTEN_SIGNALS_PERIODICALLY", "_Py_DECREF_NO_DEALLOC", - "_Py_DECREF_SPECIALIZED", "_Py_EnterRecursiveCallTstateUnchecked", "_Py_ID", "_Py_IsImmortal", @@ -628,6 +650,7 @@ def has_error_without_pop(op: parser.InstDef) -> bool: "_Py_STR", "_Py_TryIncrefCompare", "_Py_TryIncrefCompareStackRef", + "_Py_atomic_compare_exchange_uint8", "_Py_atomic_load_ptr_acquire", "_Py_atomic_load_uintptr_relaxed", "_Py_set_eval_breaker_bit", @@ -660,15 +683,15 @@ def find_stmt_end(node: parser.InstDef, idx: int) -> lexer.Token: if tkn.kind == "SEMI": return node.block.tokens[idx+1] -def check_escaping_calls(instr: parser.InstDef, escapes: dict[lexer.Token, tuple[lexer.Token, lexer.Token]]) -> None: - calls = {escapes[t][0] for t in escapes} +def check_escaping_calls(instr: parser.InstDef, escapes: dict[lexer.Token, EscapingCall]) -> None: + calls = {e.call for e in 
escapes.values()} in_if = 0 tkn_iter = iter(instr.block.tokens) for tkn in tkn_iter: if tkn.kind == "IF": next(tkn_iter) in_if = 1 - if tkn.kind == "IDENTIFIER" and tkn.text in ("DEOPT_IF", "ERROR_IF"): + if tkn.kind == "IDENTIFIER" and tkn.text in ("DEOPT_IF", "ERROR_IF", "EXIT_IF"): next(tkn_iter) in_if = 1 elif tkn.kind == "LPAREN" and in_if: @@ -679,8 +702,8 @@ def check_escaping_calls(instr: parser.InstDef, escapes: dict[lexer.Token, tuple elif tkn in calls and in_if: raise analysis_error(f"Escaping call '{tkn.text} in condition", tkn) -def find_escaping_api_calls(instr: parser.InstDef) -> dict[lexer.Token, tuple[lexer.Token, lexer.Token]]: - result: dict[lexer.Token, tuple[lexer.Token, lexer.Token]] = {} +def find_escaping_api_calls(instr: parser.InstDef) -> dict[lexer.Token, EscapingCall]: + result: dict[lexer.Token, EscapingCall] = {} tokens = instr.block.tokens for idx, tkn in enumerate(tokens): try: @@ -715,9 +738,17 @@ def find_escaping_api_calls(instr: parser.InstDef) -> dict[lexer.Token, tuple[le continue elif tkn.kind != "RBRACKET": continue + if tkn.text in ("PyStackRef_CLOSE", "PyStackRef_XCLOSE"): + if len(tokens) <= idx+2: + raise analysis_error("Unexpected end of file", next_tkn) + kills = tokens[idx+2] + if kills.kind != "IDENTIFIER": + raise analysis_error(f"Expected identifier, got '{kills.text}'", kills) + else: + kills = None start = find_stmt_start(instr, idx) end = find_stmt_end(instr, idx) - result[start] = tkn, end + result[start] = EscapingCall(start, tkn, end, kills) check_escaping_calls(instr, result) return result @@ -811,8 +842,16 @@ def compute_properties(op: parser.InstDef) -> Properties: ) error_with_pop = has_error_with_pop(op) error_without_pop = has_error_without_pop(op) + escapes = ( + bool(escaping_calls) or + variable_used(op, "Py_DECREF") or + variable_used(op, "Py_XDECREF") or + variable_used(op, "Py_CLEAR") or + variable_used(op, "SETLOCAL") + ) return Properties( escaping_calls=escaping_calls, + escapes=escapes, error_with_pop=error_with_pop, error_without_pop=error_without_pop, deopts=deopts_if, @@ -829,6 +868,7 @@ def compute_properties(op: parser.InstDef) -> Properties: and not has_free, has_free=has_free, pure="pure" in op.annotations, + no_save_ip="no_save_ip" in op.annotations, tier=tier_variable(op), needs_prev=variable_used(op, "prev_instr"), ) @@ -1003,6 +1043,13 @@ def add_pseudo( ) +def add_label( + label: parser.LabelDef, + labels: dict[str, Label], +) -> None: + labels[label.name] = Label(label.name, label.block.tokens) + + def assign_opcodes( instructions: dict[str, Instruction], families: dict[str, Family], @@ -1121,6 +1168,7 @@ def analyze_forest(forest: list[parser.AstNode]) -> Analysis: uops: dict[str, Uop] = {} families: dict[str, Family] = {} pseudos: dict[str, PseudoInstruction] = {} + labels: dict[str, Label] = {} for node in forest: match node: case parser.InstDef(name): @@ -1135,6 +1183,8 @@ def analyze_forest(forest: list[parser.AstNode]) -> Analysis: pass case parser.Pseudo(): pass + case parser.LabelDef(): + pass case _: assert False for node in forest: @@ -1146,6 +1196,8 @@ def analyze_forest(forest: list[parser.AstNode]) -> Analysis: add_family(node, instructions, families) case parser.Pseudo(): add_pseudo(node, instructions, pseudos) + case parser.LabelDef(): + add_label(node, labels) case _: pass for uop in uops.values(): @@ -1171,7 +1223,7 @@ def analyze_forest(forest: list[parser.AstNode]) -> Analysis: families["BINARY_OP"].members.append(inst) opmap, first_arg, min_instrumented = assign_opcodes(instructions, 
families, pseudos) return Analysis( - instructions, uops, families, pseudos, opmap, first_arg, min_instrumented + instructions, uops, families, pseudos, labels, opmap, first_arg, min_instrumented ) diff --git a/Tools/cases_generator/generators_common.py b/Tools/cases_generator/generators_common.py index d17617cab0266b..d3eb948f3563e7 100644 --- a/Tools/cases_generator/generators_common.py +++ b/Tools/cases_generator/generators_common.py @@ -98,6 +98,11 @@ def always_true(tkn: Token | None) -> bool: return False return tkn.text in {"true", "1"} +NON_ESCAPING_DEALLOCS = { + "_PyFloat_ExactDealloc", + "_PyLong_ExactDealloc", + "_PyUnicode_ExactDealloc", +} class Emitter: out: CWriter @@ -115,12 +120,12 @@ def __init__(self, out: CWriter): "SYNC_SP": self.sync_sp, "SAVE_STACK": self.save_stack, "RELOAD_STACK": self.reload_stack, - "PyStackRef_CLOSE": self.stackref_close, - "PyStackRef_CLOSE_SPECIALIZED": self.stackref_close, + "PyStackRef_CLOSE_SPECIALIZED": self.stackref_close_specialized, "PyStackRef_AsPyObjectSteal": self.stackref_steal, "DISPATCH": self.dispatch, "INSTRUCTION_SIZE": self.instruction_size, - "POP_DEAD_INPUTS": self.pop_dead_inputs, + "POP_INPUT": self.pop_input, + "GO_TO_INSTRUCTION": self.go_to_instruction, } self.out = out @@ -159,6 +164,13 @@ def deopt_if( exit_if = deopt_if + def goto_error(self, offset: int, label: str, storage: Storage) -> str: + if offset > 0: + return f"goto pop_{offset}_{label};" + if offset < 0: + storage.copy().flush(self.out) + return f"goto {label};" + def error_if( self, tkn: Token, @@ -181,30 +193,20 @@ def error_if( self.out.emit_at("if ", tkn) self.emit(lparen) emit_to(self.out, tkn_iter, "COMMA") - self.out.emit(") ") + self.out.emit(") {\n") label = next(tkn_iter).text next(tkn_iter) # RPAREN next(tkn_iter) # Semi colon storage.clear_inputs("at ERROR_IF") + c_offset = storage.stack.peek_offset() try: offset = -int(c_offset) except ValueError: offset = -1 - if offset > 0: - self.out.emit(f"goto pop_{offset}_") - self.out.emit(label) - self.out.emit(";\n") - elif offset == 0: - self.out.emit("goto ") - self.out.emit(label) - self.out.emit(";\n") - else: - self.out.emit("{\n") - storage.copy().flush(self.out) - self.out.emit("goto ") - self.out.emit(label) - self.out.emit(";\n") + self.out.emit(self.goto_error(offset, label, storage)) + self.out.emit("\n") + if not unconditional: self.out.emit("}\n") return not unconditional @@ -219,7 +221,7 @@ def error_no_pop( next(tkn_iter) # LPAREN next(tkn_iter) # RPAREN next(tkn_iter) # Semi colon - self.out.emit_at("goto error;", tkn) + self.out.emit_at(self.goto_error(0, "error", storage), tkn) return False def decref_inputs( @@ -234,23 +236,26 @@ def decref_inputs( next(tkn_iter) next(tkn_iter) self.out.emit_at("", tkn) - for var in uop.stack.inputs: - if var.name == "unused" or var.name == "null" or var.peek: + for var in storage.inputs: + if not var.defined: + continue + if var.name == "null": continue + close = "PyStackRef_CLOSE" + if "null" in var.name or var.condition and var.condition != "1": + close = "PyStackRef_XCLOSE" if var.size: if var.size == "1": - self.out.emit(f"PyStackRef_CLOSE({var.name}[0]);\n") + self.out.emit(f"{close}({var.name}[0]);\n") else: self.out.emit(f"for (int _i = {var.size}; --_i >= 0;) {{\n") - self.out.emit(f"PyStackRef_CLOSE({var.name}[_i]);\n") + self.out.emit(f"{close}({var.name}[_i]);\n") self.out.emit("}\n") elif var.condition: - if var.condition == "1": - self.out.emit(f"PyStackRef_CLOSE({var.name});\n") - elif var.condition != "0": - 
self.out.emit(f"PyStackRef_XCLOSE({var.name});\n") + if var.condition != "0": + self.out.emit(f"{close}({var.name});\n") else: - self.out.emit(f"PyStackRef_CLOSE({var.name});\n") + self.out.emit(f"{close}({var.name});\n") for input in storage.inputs: input.defined = False return True @@ -291,7 +296,26 @@ def kill( raise analysis_error(f"'{name}' is not a live input-only variable", name_tkn) return True - def stackref_close( + def stackref_kill( + self, + name: Token, + storage: Storage, + escapes: bool + ) -> bool: + live = "" + for var in reversed(storage.inputs): + if var.name == name.text: + if live and escapes: + raise analysis_error( + f"Cannot close '{name.text}' when " + f"'{live}' is still live", name) + var.defined = False + break + if var.defined: + live = var.name + return True + + def stackref_close_specialized( self, tkn: Token, tkn_iter: TokenIterator, @@ -299,21 +323,47 @@ def stackref_close( storage: Storage, inst: Instruction | None, ) -> bool: + self.out.emit(tkn) tkn = next(tkn_iter) assert tkn.kind == "LPAREN" self.out.emit(tkn) name = next(tkn_iter) self.out.emit(name) + comma = next(tkn_iter) + if comma.kind != "COMMA": + raise analysis_error("Expected comma", comma) + self.out.emit(comma) + dealloc = next(tkn_iter) + if dealloc.kind != "IDENTIFIER": + raise analysis_error("Expected identifier", dealloc) + self.out.emit(dealloc) if name.kind == "IDENTIFIER": - for var in storage.inputs: - if var.name == name.text: - var.defined = False + escapes = dealloc.text not in NON_ESCAPING_DEALLOCS + return self.stackref_kill(name, storage, escapes) rparen = emit_to(self.out, tkn_iter, "RPAREN") self.emit(rparen) return True - stackref_steal = stackref_close + def stackref_steal( + self, + tkn: Token, + tkn_iter: TokenIterator, + uop: Uop, + storage: Storage, + inst: Instruction | None, + ) -> bool: + self.out.emit(tkn) + tkn = next(tkn_iter) + assert tkn.kind == "LPAREN" + self.out.emit(tkn) + name = next(tkn_iter) + self.out.emit(name) + if name.kind == "IDENTIFIER": + return self.stackref_kill(name, storage, False) + rparen = emit_to(self.out, tkn_iter, "RPAREN") + self.emit(rparen) + return True def sync_sp( self, @@ -331,6 +381,23 @@ def sync_sp( self._print_storage(storage) return True + def go_to_instruction( + self, + tkn: Token, + tkn_iter: TokenIterator, + uop: Uop, + storage: Storage, + inst: Instruction | None, + ) -> bool: + next(tkn_iter) + name = next(tkn_iter) + next(tkn_iter) + next(tkn_iter) + assert name.kind == "IDENTIFIER" + self.emit("\n") + self.emit(f"goto PREDICTED_{name.text};\n") + return True + def emit_save(self, storage: Storage) -> None: storage.save(self.out) self._print_storage(storage) @@ -349,7 +416,7 @@ def save_stack( self.emit_save(storage) return True - def pop_dead_inputs( + def pop_input( self, tkn: Token, tkn_iter: TokenIterator, @@ -358,9 +425,18 @@ def pop_dead_inputs( inst: Instruction | None, ) -> bool: next(tkn_iter) + name_tkn = next(tkn_iter) + name = name_tkn.text next(tkn_iter) next(tkn_iter) - storage.pop_dead_inputs(self.out) + if not storage.inputs: + raise analysis_error("stack is empty", tkn) + tos = storage.inputs[-1] + if tos.name != name: + raise analysis_error(f"'{name} is not top of stack", name_tkn) + tos.defined = False + storage.clear_dead_inputs() + storage.flush(self.out) return True def emit_reload(self, storage: Storage) -> None: @@ -489,9 +565,15 @@ def _emit_block( self.out.start_line() line = tkn.line if tkn in escaping_calls: - if tkn != reload: + escape = escaping_calls[tkn] + if escape.kills is not None: 
+ if tkn == reload: + self.emit_reload(storage) + self.stackref_kill(escape.kills, storage, True) + self.emit_save(storage) + elif tkn != reload: self.emit_save(storage) - _, reload = escaping_calls[tkn] + reload = escape.end elif tkn == reload: self.emit_reload(storage) if tkn.kind == "LBRACE": @@ -533,7 +615,6 @@ def _emit_block( raise analysis_error(ex.args[0], tkn) from None raise analysis_error("Expecting closing brace. Reached end of file", tkn) - def emit_tokens( self, uop: Uop, @@ -548,7 +629,7 @@ def emit_tokens( storage.push_outputs() self._print_storage(storage) except StackError as ex: - raise analysis_error(ex.args[0], rbrace) + raise analysis_error(ex.args[0], rbrace) from None return storage def emit(self, txt: str | Token) -> None: @@ -583,6 +664,8 @@ def cflags(p: Properties) -> str: flags.append("HAS_ESCAPES_FLAG") if p.pure: flags.append("HAS_PURE_FLAG") + if p.no_save_ip: + flags.append("HAS_NO_SAVE_IP_FLAG") if p.oparg_and_1: flags.append("HAS_OPARG_AND_1_FLAG") if flags: diff --git a/Tools/cases_generator/lexer.py b/Tools/cases_generator/lexer.py index 37f96398ff175f..cf3c39762f29cb 100644 --- a/Tools/cases_generator/lexer.py +++ b/Tools/cases_generator/lexer.py @@ -213,6 +213,9 @@ def choice(*opts: str) -> str: # A macro in the DSL MACRO = "MACRO" kwds.append(MACRO) +# A label in the DSL +LABEL = "LABEL" +kwds.append(LABEL) keywords = {name.lower(): name for name in kwds} ANNOTATION = "ANNOTATION" @@ -226,6 +229,7 @@ def choice(*opts: str) -> str: "replicate", "tier1", "tier2", + "no_save_ip", } __all__ = [] diff --git a/Tools/cases_generator/mypy.ini b/Tools/cases_generator/mypy.ini index 8e5a31851c596e..e54349bf54a954 100644 --- a/Tools/cases_generator/mypy.ini +++ b/Tools/cases_generator/mypy.ini @@ -8,7 +8,7 @@ python_version = 3.10 # ...And be strict: strict = True -strict_concatenate = True +extra_checks = True enable_error_code = ignore-without-code,redundant-expr,truthy-bool,possibly-undefined warn_unreachable = True allow_redefinition = True diff --git a/Tools/cases_generator/opcode_metadata_generator.py b/Tools/cases_generator/opcode_metadata_generator.py index 1a9849c0cbbb25..453db6905d6842 100644 --- a/Tools/cases_generator/opcode_metadata_generator.py +++ b/Tools/cases_generator/opcode_metadata_generator.py @@ -53,6 +53,7 @@ "PASSTHROUGH", "OPARG_AND_1", "ERROR_NO_POP", + "NO_SAVE_IP", ] @@ -285,8 +286,8 @@ def generate_metadata_table(analysis: Analysis, out: CWriter) -> None: table_size = 256 + len(analysis.pseudos) out.emit("struct opcode_metadata {\n") out.emit("uint8_t valid_entry;\n") - out.emit("int8_t instr_format;\n") - out.emit("int16_t flags;\n") + out.emit("uint8_t instr_format;\n") + out.emit("uint16_t flags;\n") out.emit("};\n\n") out.emit( f"extern const struct opcode_metadata _PyOpcode_opcode_metadata[{table_size}];\n" diff --git a/Tools/cases_generator/optimizer_generator.py b/Tools/cases_generator/optimizer_generator.py index d08b621aed552b..5cfec4bfecbf07 100644 --- a/Tools/cases_generator/optimizer_generator.py +++ b/Tools/cases_generator/optimizer_generator.py @@ -36,10 +36,10 @@ def validate_uop(override: Uop, uop: Uop) -> None: def type_name(var: StackItem) -> str: if var.is_array(): - return f"_Py_UopsSymbol **" + return f"JitOptSymbol **" if var.type: return var.type - return f"_Py_UopsSymbol *" + return f"JitOptSymbol *" def declare_variables(uop: Uop, out: CWriter, skip_inputs: bool) -> None: @@ -126,7 +126,7 @@ def write_uop( try: out.start_line() if override: - code_list, storage = Storage.for_uop(stack, prototype, 
extract_bits=False) + code_list, storage = Storage.for_uop(stack, prototype) for code in code_list: out.emit(code) if debug: @@ -151,11 +151,11 @@ def write_uop( var.defined = False storage = emitter.emit_tokens(override, storage, None) out.start_line() - storage.flush(out, cast_type="_Py_UopsSymbol *", extract_bits=False) + storage.flush(out, cast_type="JitOptSymbol *") else: emit_default(out, uop, stack) out.start_line() - stack.flush(out, cast_type="_Py_UopsSymbol *", extract_bits=False) + stack.flush(out, cast_type="JitOptSymbol *") except StackError as ex: raise analysis_error(ex.args[0], prototype.body[0]) # from None @@ -198,7 +198,7 @@ def generate_abstract_interpreter( declare_variables(override, out, skip_inputs=False) else: declare_variables(uop, out, skip_inputs=True) - stack = Stack() + stack = Stack(False) write_uop(override, uop, out, stack, debug, skip_inputs=(override is None)) out.start_line() out.emit("break;\n") diff --git a/Tools/cases_generator/parser.py b/Tools/cases_generator/parser.py index db672ad5501f15..68bbb88719e682 100644 --- a/Tools/cases_generator/parser.py +++ b/Tools/cases_generator/parser.py @@ -3,6 +3,7 @@ Macro, Pseudo, Family, + LabelDef, Parser, Context, CacheEffect, diff --git a/Tools/cases_generator/parsing.py b/Tools/cases_generator/parsing.py index de31d9b232f9df..eb8c8a7ecd32e8 100644 --- a/Tools/cases_generator/parsing.py +++ b/Tools/cases_generator/parsing.py @@ -150,8 +150,13 @@ class Pseudo(Node): targets: list[str] # opcodes this can be replaced by as_sequence: bool +@dataclass +class LabelDef(Node): + name: str + block: Block -AstNode = InstDef | Macro | Pseudo | Family + +AstNode = InstDef | Macro | Pseudo | Family | LabelDef class Parser(PLexer): @@ -165,6 +170,18 @@ def definition(self) -> AstNode | None: return pseudo if inst := self.inst_def(): return inst + if label := self.label_def(): + return label + return None + + @contextual + def label_def(self) -> LabelDef | None: + if self.expect(lx.LABEL): + if self.expect(lx.LPAREN): + if tkn := self.expect(lx.IDENTIFIER): + if self.expect(lx.RPAREN): + if block := self.block(): + return LabelDef(tkn.text, block) return None @contextual @@ -357,9 +374,12 @@ def uops(self) -> list[UOp] | None: def uop(self) -> UOp | None: if tkn := self.expect(lx.IDENTIFIER): if self.expect(lx.DIVIDE): + sign = 1 + if negate := self.expect(lx.MINUS): + sign = -1 if num := self.expect(lx.NUMBER): try: - size = int(num.text) + size = sign * int(num.text) except ValueError: raise self.make_syntax_error( f"Expected integer, got {num.text!r}" diff --git a/Tools/cases_generator/stack.py b/Tools/cases_generator/stack.py index 9471fe0e56f7d8..5121837ed8334b 100644 --- a/Tools/cases_generator/stack.py +++ b/Tools/cases_generator/stack.py @@ -224,13 +224,14 @@ def array_or_scalar(var: StackItem | Local) -> str: return "array" if var.is_array() else "scalar" class Stack: - def __init__(self) -> None: + def __init__(self, extract_bits: bool=True) -> None: self.top_offset = StackOffset.empty() self.base_offset = StackOffset.empty() self.variables: list[Local] = [] self.defined: set[str] = set() + self.extract_bits = extract_bits - def pop(self, var: StackItem, extract_bits: bool = True) -> tuple[str, Local]: + def pop(self, var: StackItem) -> tuple[str, Local]: self.top_offset.pop(var) indirect = "&" if var.is_array() else "" if self.variables: @@ -272,7 +273,7 @@ def pop(self, var: StackItem, extract_bits: bool = True) -> tuple[str, Local]: return "", Local.unused(var) self.defined.add(var.name) cast = f"({var.type})" 
if (not indirect and var.type) else "" - bits = ".bits" if cast and extract_bits else "" + bits = ".bits" if cast and self.extract_bits else "" assign = f"{var.name} = {cast}{indirect}stack_pointer[{self.base_offset.to_c()}]{bits};" if var.condition: if var.condition == "1": @@ -315,7 +316,7 @@ def _adjust_stack_pointer(self, out: CWriter, number: str) -> None: out.emit("assert(WITHIN_STACK_BOUNDS());\n") def flush( - self, out: CWriter, cast_type: str = "uintptr_t", extract_bits: bool = True + self, out: CWriter, cast_type: str = "uintptr_t" ) -> None: out.start_line() var_offset = self.base_offset.copy() @@ -324,7 +325,7 @@ def flush( var.defined and not var.in_memory ): - Stack._do_emit(out, var.item, var_offset, cast_type, extract_bits) + Stack._do_emit(out, var.item, var_offset, cast_type, self.extract_bits) var.in_memory = True var_offset.push(var.item) number = self.top_offset.to_c() @@ -346,7 +347,7 @@ def as_comment(self) -> str: ) def copy(self) -> "Stack": - other = Stack() + other = Stack(self.extract_bits) other.top_offset = self.top_offset.copy() other.base_offset = self.base_offset.copy() other.variables = [var.copy() for var in self.variables] @@ -507,14 +508,10 @@ def locals_cached(self) -> bool: return True return False - def flush(self, out: CWriter, cast_type: str = "uintptr_t", extract_bits: bool = True) -> None: + def flush(self, out: CWriter, cast_type: str = "uintptr_t") -> None: self.clear_dead_inputs() self._push_defined_outputs() - self.stack.flush(out, cast_type, extract_bits) - - def pop_dead_inputs(self, out: CWriter, cast_type: str = "uintptr_t", extract_bits: bool = True) -> None: - self.clear_dead_inputs() - self.stack.flush(out, cast_type, extract_bits) + self.stack.flush(out, cast_type) def save(self, out: CWriter) -> None: assert self.spilled >= 0 @@ -534,12 +531,12 @@ def reload(self, out: CWriter) -> None: out.emit("stack_pointer = _PyFrame_GetStackPointer(frame);\n") @staticmethod - def for_uop(stack: Stack, uop: Uop, extract_bits: bool = True) -> tuple[list[str], "Storage"]: + def for_uop(stack: Stack, uop: Uop) -> tuple[list[str], "Storage"]: code_list: list[str] = [] inputs: list[Local] = [] peeks: list[Local] = [] for input in reversed(uop.stack.inputs): - code, local = stack.pop(input, extract_bits) + code, local = stack.pop(input) code_list.append(code) if input.peek: peeks.append(local) diff --git a/Tools/cases_generator/tier1_generator.py b/Tools/cases_generator/tier1_generator.py index fcdd3bdacd0e7a..13430524b26dcd 100644 --- a/Tools/cases_generator/tier1_generator.py +++ b/Tools/cases_generator/tier1_generator.py @@ -32,6 +32,10 @@ FOOTER = "#undef TIER_ONE\n" +INSTRUCTION_START_MARKER = "/* BEGIN INSTRUCTIONS */" +INSTRUCTION_END_MARKER = "/* END INSTRUCTIONS */" +LABEL_START_MARKER = "/* BEGIN LABELS */" +LABEL_END_MARKER = "/* END LABELS */" def declare_variable(var: StackItem, out: CWriter) -> None: @@ -133,13 +137,64 @@ def generate_tier1( ) -> None: write_header(__file__, filenames, outfile) outfile.write( - """ + f""" #ifdef TIER_TWO #error "This file is for Tier 1 only" #endif #define TIER_ONE 1 + +#if !USE_COMPUTED_GOTOS + dispatch_opcode: + switch (opcode) +#endif + {{ + {INSTRUCTION_START_MARKER} """ ) + generate_tier1_cases(analysis, outfile, lines) + outfile.write(f""" + {INSTRUCTION_END_MARKER} +#if USE_COMPUTED_GOTOS + _unknown_opcode: +#else + EXTRA_CASES // From pycore_opcode_metadata.h, a 'case' for each unused opcode +#endif + /* Tell C compilers not to hold the opcode variable in the loop. 
+ next_instr points the current instruction without TARGET(). */ + opcode = next_instr->op.code; + _PyErr_Format(tstate, PyExc_SystemError, + "%U:%d: unknown opcode %d", + _PyFrame_GetCode(frame)->co_filename, + PyUnstable_InterpreterFrame_GetLine(frame), + opcode); + goto error; + + }} + + /* This should never be reached. Every opcode should end with DISPATCH() + or goto error. */ + Py_UNREACHABLE(); + {LABEL_START_MARKER} +""") + generate_tier1_labels(analysis, outfile, lines) + outfile.write(f"{LABEL_END_MARKER}\n") + outfile.write(FOOTER) + +def generate_tier1_labels( + analysis: Analysis, outfile: TextIO, lines: bool +) -> None: + out = CWriter(outfile, 2, lines) + out.emit("\n") + for name, label in analysis.labels.items(): + out.emit(f"{name}:\n") + for tkn in label.body: + out.emit(tkn) + out.emit("\n") + out.emit("\n") + +def generate_tier1_cases( + analysis: Analysis, outfile: TextIO, lines: bool +) -> None: out = CWriter(outfile, 2, lines) emitter = Emitter(out) out.emit("\n") @@ -147,18 +202,21 @@ def generate_tier1( needs_this = uses_this(inst) out.emit("\n") out.emit(f"TARGET({name}) {{\n") - unused_guard = "(void)this_instr;\n" if inst.family is None else "" + unused_guard = "(void)this_instr;\n" if inst.properties.needs_prev: out.emit(f"_Py_CODEUNIT* const prev_instr = frame->instr_ptr;\n") if needs_this and not inst.is_target: - out.emit(f"_Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;\n") + if inst.properties.no_save_ip: + out.emit(f"_Py_CODEUNIT* const this_instr = next_instr;\n") + else: + out.emit(f"_Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;\n") out.emit(unused_guard) - else: + elif not inst.properties.no_save_ip: out.emit(f"frame->instr_ptr = next_instr;\n") out.emit(f"next_instr += {inst.size};\n") out.emit(f"INSTRUCTION_STATS({name});\n") if inst.is_target: - out.emit(f"PREDICTED({name});\n") + out.emit(f"PREDICTED_{name}:;\n") if needs_this: out.emit(f"_Py_CODEUNIT* const this_instr = next_instr - {inst.size};\n") out.emit(unused_guard) @@ -182,7 +240,6 @@ def generate_tier1( out.start_line() out.emit("}") out.emit("\n") - outfile.write(FOOTER) arg_parser = argparse.ArgumentParser( diff --git a/Tools/cases_generator/tier2_generator.py b/Tools/cases_generator/tier2_generator.py index dd16a1a7eb28b5..4540eb252634ba 100644 --- a/Tools/cases_generator/tier2_generator.py +++ b/Tools/cases_generator/tier2_generator.py @@ -69,41 +69,11 @@ def __init__(self, out: CWriter): super().__init__(out) self._replacers["oparg"] = self.oparg - def error_if( - self, - tkn: Token, - tkn_iter: TokenIterator, - uop: Uop, - storage: Storage, - inst: Instruction | None, - ) -> bool: - self.out.emit_at("if ", tkn) - lparen = next(tkn_iter) - self.emit(lparen) - assert lparen.kind == "LPAREN" - first_tkn = next(tkn_iter) - self.out.emit(first_tkn) - emit_to(self.out, tkn_iter, "COMMA") - label = next(tkn_iter).text - next(tkn_iter) # RPAREN - next(tkn_iter) # Semi colon - self.emit(") JUMP_TO_ERROR();\n") - return not always_true(first_tkn) - - - def error_no_pop( - self, - tkn: Token, - tkn_iter: TokenIterator, - uop: Uop, - storage: Storage, - inst: Instruction | None, - ) -> bool: - next(tkn_iter) # LPAREN - next(tkn_iter) # RPAREN - next(tkn_iter) # Semi colon - self.out.emit_at("JUMP_TO_ERROR();", tkn) - return False + def goto_error(self, offset: int, label: str, storage: Storage) -> str: + # To do: Add jump targets for popping values. 
+ if offset != 0: + storage.copy().flush(self.out) + return f"JUMP_TO_ERROR();" def deopt_if( self, diff --git a/Tools/clinic/libclinic/converters.py b/Tools/clinic/libclinic/converters.py index a65f73ba02fb5d..2998eb519648aa 100644 --- a/Tools/clinic/libclinic/converters.py +++ b/Tools/clinic/libclinic/converters.py @@ -1182,10 +1182,8 @@ def pre_render(self) -> None: @property def parser_type(self) -> str: assert self.type is not None - if self.function.kind in {METHOD_INIT, METHOD_NEW, STATIC_METHOD, CLASS_METHOD}: - tp, _ = correct_name_for_self(self.function) - return tp - return self.type + tp, _ = correct_name_for_self(self.function) + return tp def render(self, parameter: Parameter, data: CRenderData) -> None: """ diff --git a/Tools/clinic/mypy.ini b/Tools/clinic/mypy.ini index b1fdad673c61a1..6520e05db0bc31 100644 --- a/Tools/clinic/mypy.ini +++ b/Tools/clinic/mypy.ini @@ -7,6 +7,6 @@ python_version = 3.10 # and be strict! strict = True -strict_concatenate = True +extra_checks = True enable_error_code = ignore-without-code,redundant-expr,truthy-bool warn_unreachable = True diff --git a/Tools/gdb/libpython.py b/Tools/gdb/libpython.py index 698ecbd3b549aa..27aa6b0cc266d3 100755 --- a/Tools/gdb/libpython.py +++ b/Tools/gdb/libpython.py @@ -99,7 +99,7 @@ def interp_frame_has_tlbc_index(): Py_TPFLAGS_TYPE_SUBCLASS = (1 << 31) #From pycore_frame.h -FRAME_OWNED_BY_CSTACK = 3 +FRAME_OWNED_BY_INTERPRETER = 3 MAX_OUTPUT_LEN=1024 @@ -890,7 +890,7 @@ class PyLongObjectPtr(PyObjectPtr): def proxyval(self, visited): ''' - Python's Include/longinterpr.h has this declaration: + Python's Include/cpython/longinterpr.h has this declaration: typedef struct _PyLongValue { uintptr_t lv_tag; /* Number of digits, sign and flags */ @@ -909,8 +909,7 @@ def proxyval(self, visited): - 0: Positive - 1: Zero - 2: Negative - The third lowest bit of lv_tag is reserved for an immortality flag, but is - not currently used. + The third lowest bit of lv_tag is set to 1 for the small ints and 0 otherwise. where SHIFT can be either: #define PyLong_SHIFT 30 @@ -1113,7 +1112,7 @@ def _f_lasti(self): return int(instr_ptr - first_instr) def is_shim(self): - return self._f_special("owner", int) == FRAME_OWNED_BY_CSTACK + return self._f_special("owner", int) == FRAME_OWNED_BY_INTERPRETER def previous(self): return self._f_special("previous", PyFramePtr) diff --git a/Tools/i18n/pygettext.py b/Tools/i18n/pygettext.py index f78ff16bff9039..81d9fdbb36017b 100755 --- a/Tools/i18n/pygettext.py +++ b/Tools/i18n/pygettext.py @@ -1,26 +1,6 @@ #! /usr/bin/env python3 -# -*- coding: iso-8859-1 -*- -# Originally written by Barry Warsaw -# -# Minimally patched to make it even more xgettext compatible -# by Peter Funk -# -# 2002-11-22 Jürgen Hermann -# Added checks that _() only contains string literals, and -# command line args are resolved to module lists, i.e. you -# can now pass a filename, a module or package name, or a -# directory (including globbing chars, important for Win32). -# Made docstring fit in 80 chars wide displays using pydoc. -# - -# for selftesting -try: - import fintl - _ = fintl.gettext -except ImportError: - _ = lambda s: s -__doc__ = _("""pygettext -- Python equivalent of xgettext(1) +"""pygettext -- Python equivalent of xgettext(1) Many systems (Solaris, Linux, Gnu) provide extensive tools that ease the internationalization of C programs. Most of these tools are independent of @@ -153,16 +133,16 @@ conjunction with the -D option above. If `inputfile' is -, standard input is read. 
-""") +""" -import os +import ast +import getopt +import glob import importlib.machinery import importlib.util +import os import sys -import glob import time -import getopt -import ast import tokenize from collections import defaultdict from dataclasses import dataclass, field @@ -173,7 +153,7 @@ # The normal pot-file header. msgmerge and Emacs's po-mode work better if it's # there. -pot_header = _('''\ +pot_header = '''\ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR ORGANIZATION # FIRST AUTHOR , YEAR. @@ -190,7 +170,7 @@ "Content-Transfer-Encoding: %(encoding)s\\n" "Generated-By: pygettext.py %(version)s\\n" -''') +''' def usage(code, msg=''): @@ -204,7 +184,7 @@ def make_escapes(pass_nonascii): global escapes, escape if pass_nonascii: # Allow non-ascii characters to pass through so that e.g. 'msgid - # "Höhe"' would result not result in 'msgid "H\366he"'. Otherwise we + # "Höhe"' would result not result in 'msgid "H\366he"'. Otherwise we # escape any character outside the 32..126 range. mod = 128 escape = escape_ascii @@ -224,6 +204,7 @@ def make_escapes(pass_nonascii): def escape_ascii(s, encoding): return ''.join(escapes[ord(c)] if ord(c) < 128 else c for c in s) + def escape_nonascii(s, encoding): return ''.join(escapes[b] for b in s.encode(encoding)) @@ -416,7 +397,7 @@ def __waiting(self, ttype, tstring, lineno): if func_name not in opts.keywords: continue if len(call.args) != 1: - print(_( + print(( '*** %(file)s:%(lineno)s: Seen unexpected amount of' ' positional arguments in gettext call: %(source_segment)s' ) % { @@ -426,7 +407,7 @@ def __waiting(self, ttype, tstring, lineno): }, file=sys.stderr) continue if call.keywords: - print(_( + print(( '*** %(file)s:%(lineno)s: Seen unexpected keyword arguments' ' in gettext call: %(source_segment)s' ) % { @@ -437,7 +418,7 @@ def __waiting(self, ttype, tstring, lineno): continue arg = call.args[0] if not isinstance(arg, ast.Constant): - print(_( + print(( '*** %(file)s:%(lineno)s: Seen unexpected argument type' ' in gettext call: %(source_segment)s' ) % { @@ -550,7 +531,7 @@ def __addentry(self, msg, lineno=None, *, is_docstring=False): ) def warn_unexpected_token(self, token): - print(_( + print(( '*** %(file)s:%(lineno)s: Seen unexpected token "%(token)s"' ) % { 'token': token, @@ -677,7 +658,7 @@ class Options: elif opt in ('-S', '--style'): options.locationstyle = locations.get(arg.lower()) if options.locationstyle is None: - usage(1, _('Invalid value for --style: %s') % arg) + usage(1, f'Invalid value for --style: {arg}') elif opt in ('-o', '--output'): options.outfile = arg elif opt in ('-p', '--output-dir'): @@ -685,13 +666,13 @@ class Options: elif opt in ('-v', '--verbose'): options.verbose = 1 elif opt in ('-V', '--version'): - print(_('pygettext.py (xgettext for Python) %s') % __version__) + print(f'pygettext.py (xgettext for Python) {__version__}') sys.exit(0) elif opt in ('-w', '--width'): try: options.width = int(arg) except ValueError: - usage(1, _('--width argument must be an integer: %s') % arg) + usage(1, f'--width argument must be an integer: {arg}') elif opt in ('-x', '--exclude-file'): options.excludefilename = arg elif opt in ('-X', '--no-docstrings'): @@ -719,8 +700,8 @@ class Options: with open(options.excludefilename) as fp: options.toexclude = fp.readlines() except IOError: - print(_( - "Can't read --exclude-file: %s") % options.excludefilename, file=sys.stderr) + print(f"Can't read --exclude-file: {options.excludefilename}", + file=sys.stderr) sys.exit(1) else: options.toexclude = [] @@ -739,12 +720,12 @@ 
class Options: for filename in args: if filename == '-': if options.verbose: - print(_('Reading standard input')) + print('Reading standard input') fp = sys.stdin.buffer closep = 0 else: if options.verbose: - print(_('Working on %s') % filename) + print(f'Working on {filename}') fp = open(filename, 'rb') closep = 1 try: @@ -779,7 +760,3 @@ class Options: if __name__ == '__main__': main() - # some more test strings - # this one creates a warning - _('*** Seen unexpected token "%(token)s"') % {'token': 'test'} - _('more' 'than' 'one' 'string') diff --git a/Tools/jit/README.md b/Tools/jit/README.md index 801c64e4059ccc..4107265754f6ec 100644 --- a/Tools/jit/README.md +++ b/Tools/jit/README.md @@ -3,6 +3,8 @@ The JIT Compiler This version of CPython can be built with an experimental just-in-time compiler[^pep-744]. While most everything you already know about building and using CPython is unchanged, you will probably need to install a compatible version of LLVM first. +Python 3.11 or newer is required to build the JIT. + ## Installing LLVM The JIT compiler does not require end users to install any third-party dependencies, but part of it must be *built* using LLVM[^why-llvm]. You are *not* required to build the rest of CPython using LLVM, or even the same version of LLVM (in fact, this is uncommon). @@ -54,7 +56,7 @@ For `PCbuild`-based builds, pass the new `--experimental-jit` option to `build.b For all other builds, pass the new `--enable-experimental-jit` option to `configure`. -Otherwise, just configure and build as you normally would. Cross-compiling "just works", since the JIT is built for the host platform. +Otherwise, just configure and build as you normally would. Cross-compiling "just works", since the JIT is built for the host platform. The JIT can also be enabled or disabled using the `PYTHON_JIT` environment variable, even on builds where it is enabled or disabled by default. More details about configuring CPython with the JIT and optional values for `--enable-experimental-jit` can be found [here](https://docs.python.org/dev/whatsnew/3.13.html#experimental-jit-compiler). 
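
The README.md change above notes that the JIT can be switched on or off at runtime with the `PYTHON_JIT` environment variable, even on builds where `--enable-experimental-jit` fixed a default. A minimal sketch of exercising that toggle from a small harness (the child script and its output are illustrative assumptions, not part of this patch; `PYTHON_JIT` has no effect on builds without the JIT configured):

```python
import os
import subprocess
import sys


def run_with_jit(enabled: bool) -> str:
    """Run a trivial child interpreter with PYTHON_JIT forced on or off."""
    # PYTHON_JIT overrides the build default, per Tools/jit/README.md.
    env = dict(os.environ, PYTHON_JIT="1" if enabled else "0")
    result = subprocess.run(
        [sys.executable, "-c", "print(sum(range(1_000_000)))"],
        env=env,
        capture_output=True,
        text=True,
        check=True,
    )
    return result.stdout.strip()


if __name__ == "__main__":
    print(run_with_jit(enabled=True))
    print(run_with_jit(enabled=False))
```
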
diff --git a/Tools/scripts/summarize_stats.py b/Tools/scripts/summarize_stats.py index bc7ccfe33e777d..161af09183a282 100644 --- a/Tools/scripts/summarize_stats.py +++ b/Tools/scripts/summarize_stats.py @@ -545,6 +545,41 @@ def get_optimizer_stats(self) -> dict[str, tuple[int, int | None]]: ): (incorrect_keys, attempts), } + def get_jit_memory_stats(self) -> dict[Doc, tuple[int, int | None]]: + jit_total_memory_size = self._data["JIT total memory size"] + jit_code_size = self._data["JIT code size"] + jit_trampoline_size = self._data["JIT trampoline size"] + jit_data_size = self._data["JIT data size"] + jit_padding_size = self._data["JIT padding size"] + jit_freed_memory_size = self._data["JIT freed memory size"] + + return { + Doc( + "Total memory size", + "The total size of the memory allocated for the JIT traces", + ): (jit_total_memory_size, None), + Doc( + "Code size", + "The size of the memory allocated for the code of the JIT traces", + ): (jit_code_size, jit_total_memory_size), + Doc( + "Trampoline size", + "The size of the memory allocated for the trampolines of the JIT traces", + ): (jit_trampoline_size, jit_total_memory_size), + Doc( + "Data size", + "The size of the memory allocated for the data of the JIT traces", + ): (jit_data_size, jit_total_memory_size), + Doc( + "Padding size", + "The size of the memory allocated for the padding of the JIT traces", + ): (jit_padding_size, jit_total_memory_size), + Doc( + "Freed memory size", + "The size of the memory freed from the JIT traces", + ): (jit_freed_memory_size, jit_total_memory_size), + } + def get_histogram(self, prefix: str) -> list[tuple[int, int]]: rows = [] for k, v in self._data.items(): @@ -1161,16 +1196,31 @@ def calc_optimizer_table(stats: Stats) -> Rows: for label, (value, den) in optimizer_stats.items() ] - def calc_histogram_table(key: str, den: str) -> RowCalculator: + def calc_jit_memory_table(stats: Stats) -> Rows: + jit_memory_stats = stats.get_jit_memory_stats() + + return [ + ( + label, + Count(value), + Ratio(value, den, percentage=label != "Total memory size"), + ) + for label, (value, den) in jit_memory_stats.items() + ] + + def calc_histogram_table(key: str, den: str | None = None) -> RowCalculator: def calc(stats: Stats) -> Rows: histogram = stats.get_histogram(key) - denominator = stats.get(den) + + if den: + denominator = stats.get(den) + else: + denominator = 0 + for _, v in histogram: + denominator += v rows: Rows = [] - last_non_zero = 0 for k, v in histogram: - if v != 0: - last_non_zero = len(rows) rows.append( ( f"<= {k:,d}", @@ -1178,9 +1228,19 @@ def calc(stats: Stats) -> Rows: Ratio(v, denominator), ) ) - # Don't include any zero entries at the end - rows = rows[: last_non_zero + 1] - return rows + # Don't include any leading and trailing zero entries + start = 0 + end = len(rows) - 1 + + while start <= end: + if rows[start][1] == 0: + start += 1 + elif rows[end][1] == 0: + end -= 1 + else: + break + + return rows[start:end+1] return calc @@ -1214,6 +1274,28 @@ def iter_optimization_tables(base_stats: Stats, head_stats: Stats | None = None) yield Table(("", "Count:", "Ratio:"), calc_optimization_table, JoinMode.CHANGE) yield Table(("", "Count:", "Ratio:"), calc_optimizer_table, JoinMode.CHANGE) + yield Section( + "JIT memory stats", + "JIT memory stats", + [ + Table( + ("", "Size (bytes):", "Ratio:"), + calc_jit_memory_table, + JoinMode.CHANGE + ) + ], + ) + yield Section( + "JIT trace total memory histogram", + "JIT trace total memory histogram", + [ + Table( + ("Size (bytes)", "Count", 
"Ratio:"), + calc_histogram_table("Trace total memory size"), + JoinMode.CHANGE_NO_SORT, + ) + ], + ) for name, den in [ ("Trace length", "Optimization traces created"), ("Optimized trace length", "Optimization traces created"), diff --git a/Tools/tsan/supressions.txt b/Tools/tsan/suppressions.txt similarity index 100% rename from Tools/tsan/supressions.txt rename to Tools/tsan/suppressions.txt diff --git a/configure b/configure index 61ee51c4b36473..3eb787f788bfb9 100755 --- a/configure +++ b/configure @@ -814,7 +814,7 @@ MODULE_TIME_TRUE MODULE__IO_FALSE MODULE__IO_TRUE MODULE_BUILDTYPE -PYTHREAD_NAME_MAXLEN +_PYTHREAD_NAME_MAXLEN TEST_MODULES OPENSSL_LDFLAGS OPENSSL_LIBS @@ -4097,6 +4097,9 @@ then *-apple-ios*) ac_sys_system=iOS ;; + *-*-darwin*) + ac_sys_system=Darwin + ;; *-*-vxworks*) ac_sys_system=VxWorks ;; @@ -4129,6 +4132,7 @@ then case $MACHDEP in aix*) MACHDEP="aix";; + freebsd*) MACHDEP="freebsd";; linux-android*) MACHDEP="android";; linux*) MACHDEP="linux";; cygwin*) MACHDEP="cygwin";; @@ -4591,11 +4595,20 @@ printf "%s\n" "$IPHONEOS_DEPLOYMENT_TARGET" >&6; } ;; esac ;; + *-*-darwin*) + case "$host_cpu" in + arm*) + _host_ident=arm + ;; + *) + _host_ident=$host_cpu + esac + ;; *-*-vxworks*) _host_ident=$host_cpu ;; *-*-emscripten) - _host_ident=$(emcc -dumpversion)-$host_cpu + _host_ident=$(emcc -dumpversion | cut -f1 -d-)-$host_cpu ;; wasm32-*-* | wasm64-*-*) _host_ident=$host_cpu @@ -9398,7 +9411,7 @@ fi printf %s "checking BOLT_COMMON_FLAGS... " >&6; } if test -z "${BOLT_COMMON_FLAGS}" then - BOLT_COMMON_FLAGS=-update-debug-sections + BOLT_COMMON_FLAGS=" -update-debug-sections -skip-funcs=_PyEval_EvalFrameDefault,sre_ucs1_match/1,sre_ucs2_match/1,sre_ucs4_match/1 " fi @@ -9615,7 +9628,7 @@ fi as_fn_append LINKFORSHARED " -sFORCE_FILESYSTEM -lidbfs.js -lnodefs.js -lproxyfs.js -lworkerfs.js" as_fn_append LINKFORSHARED " -sEXPORTED_RUNTIME_METHODS=FS,callMain,ENV" - as_fn_append LINKFORSHARED " -sEXPORTED_FUNCTIONS=_main,_Py_Version" + as_fn_append LINKFORSHARED " -sEXPORTED_FUNCTIONS=_main,_Py_Version,__PyRuntime,__PyEM_EMSCRIPTEN_COUNT_ARGS_OFFSET" as_fn_append LINKFORSHARED " -sSTACK_SIZE=5MB" if test "x$enable_wasm_dynamic_linking" = xyes @@ -19001,6 +19014,12 @@ if test "x$ac_cv_func_ctermid" = xyes then : printf "%s\n" "#define HAVE_CTERMID 1" >>confdefs.h +fi +ac_fn_c_check_func "$LINENO" "dladdr" "ac_cv_func_dladdr" +if test "x$ac_cv_func_dladdr" = xyes +then : + printf "%s\n" "#define HAVE_DLADDR 1" >>confdefs.h + fi ac_fn_c_check_func "$LINENO" "dup" "ac_cv_func_dup" if test "x$ac_cv_func_dup" = xyes @@ -19986,10 +20005,10 @@ then : printf "%s\n" "#define HAVE_TRUNCATE 1" >>confdefs.h fi -ac_fn_c_check_func "$LINENO" "ttyname" "ac_cv_func_ttyname" -if test "x$ac_cv_func_ttyname" = xyes +ac_fn_c_check_func "$LINENO" "ttyname_r" "ac_cv_func_ttyname_r" +if test "x$ac_cv_func_ttyname_r" = xyes then : - printf "%s\n" "#define HAVE_TTYNAME 1" >>confdefs.h + printf "%s\n" "#define HAVE_TTYNAME_R 1" >>confdefs.h fi ac_fn_c_check_func "$LINENO" "umask" "ac_cv_func_umask" @@ -30385,17 +30404,17 @@ CPPFLAGS=$save_CPPFLAGS # gh-59705: Maximum length in bytes of a thread name case "$ac_sys_system" in - Linux*) PYTHREAD_NAME_MAXLEN=15;; # Linux and Android - SunOS*) PYTHREAD_NAME_MAXLEN=31;; - NetBSD*) PYTHREAD_NAME_MAXLEN=31;; - Darwin) PYTHREAD_NAME_MAXLEN=63;; - iOS) PYTHREAD_NAME_MAXLEN=63;; - FreeBSD*) PYTHREAD_NAME_MAXLEN=98;; - *) PYTHREAD_NAME_MAXLEN=;; + Linux*) _PYTHREAD_NAME_MAXLEN=15;; # Linux and Android + SunOS*) _PYTHREAD_NAME_MAXLEN=31;; + NetBSD*) 
_PYTHREAD_NAME_MAXLEN=31;; + Darwin) _PYTHREAD_NAME_MAXLEN=63;; + iOS) _PYTHREAD_NAME_MAXLEN=63;; + FreeBSD*) _PYTHREAD_NAME_MAXLEN=98;; + *) _PYTHREAD_NAME_MAXLEN=;; esac -if test -n "$PYTHREAD_NAME_MAXLEN"; then +if test -n "$_PYTHREAD_NAME_MAXLEN"; then -printf "%s\n" "#define PYTHREAD_NAME_MAXLEN $PYTHREAD_NAME_MAXLEN" >>confdefs.h +printf "%s\n" "#define _PYTHREAD_NAME_MAXLEN $_PYTHREAD_NAME_MAXLEN" >>confdefs.h fi diff --git a/configure.ac b/configure.ac index 172e8a1a842010..c0130b8082cd8a 100644 --- a/configure.ac +++ b/configure.ac @@ -330,6 +330,9 @@ then *-apple-ios*) ac_sys_system=iOS ;; + *-*-darwin*) + ac_sys_system=Darwin + ;; *-*-vxworks*) ac_sys_system=VxWorks ;; @@ -362,6 +365,7 @@ then case $MACHDEP in aix*) MACHDEP="aix";; + freebsd*) MACHDEP="freebsd";; linux-android*) MACHDEP="android";; linux*) MACHDEP="linux";; cygwin*) MACHDEP="cygwin";; @@ -790,11 +794,20 @@ if test "$cross_compiling" = yes; then ;; esac ;; + *-*-darwin*) + case "$host_cpu" in + arm*) + _host_ident=arm + ;; + *) + _host_ident=$host_cpu + esac + ;; *-*-vxworks*) _host_ident=$host_cpu ;; *-*-emscripten) - _host_ident=$(emcc -dumpversion)-$host_cpu + _host_ident=$(emcc -dumpversion | cut -f1 -d-)-$host_cpu ;; wasm32-*-* | wasm64-*-*) _host_ident=$host_cpu @@ -2170,7 +2183,14 @@ if test -z "${BOLT_COMMON_FLAGS}" then AS_VAR_SET( [BOLT_COMMON_FLAGS], - [-update-debug-sections] + [m4_normalize(" + [-update-debug-sections] + + dnl At least LLVM 19.x doesn't support computed gotos in PIC compiled code. + dnl Exclude functions containing computed gotos. + dnl TODO this may be fixed in LLVM 20.x via https://github.com/llvm/llvm-project/pull/120267. + [-skip-funcs=_PyEval_EvalFrameDefault,sre_ucs1_match/1,sre_ucs2_match/1,sre_ucs4_match/1] + ")] ) fi @@ -2350,7 +2370,7 @@ AS_CASE([$ac_sys_system], dnl Include file system support AS_VAR_APPEND([LINKFORSHARED], [" -sFORCE_FILESYSTEM -lidbfs.js -lnodefs.js -lproxyfs.js -lworkerfs.js"]) AS_VAR_APPEND([LINKFORSHARED], [" -sEXPORTED_RUNTIME_METHODS=FS,callMain,ENV"]) - AS_VAR_APPEND([LINKFORSHARED], [" -sEXPORTED_FUNCTIONS=_main,_Py_Version"]) + AS_VAR_APPEND([LINKFORSHARED], [" -sEXPORTED_FUNCTIONS=_main,_Py_Version,__PyRuntime,__PyEM_EMSCRIPTEN_COUNT_ARGS_OFFSET"]) AS_VAR_APPEND([LINKFORSHARED], [" -sSTACK_SIZE=5MB"]) AS_VAR_IF([enable_wasm_dynamic_linking], [yes], [ @@ -5121,7 +5141,7 @@ fi # checks for library functions AC_CHECK_FUNCS([ \ accept4 alarm bind_textdomain_codeset chmod chown clock closefrom close_range confstr \ - copy_file_range ctermid dup dup3 execv explicit_bzero explicit_memset \ + copy_file_range ctermid dladdr dup dup3 execv explicit_bzero explicit_memset \ faccessat fchmod fchmodat fchown fchownat fdopendir fdwalk fexecve \ fork fork1 fpathconf fstatat ftime ftruncate futimens futimes futimesat \ gai_strerror getegid geteuid getgid getgrent getgrgid getgrgid_r \ @@ -5145,7 +5165,7 @@ AC_CHECK_FUNCS([ \ sigfillset siginterrupt sigpending sigrelse sigtimedwait sigwait \ sigwaitinfo snprintf splice strftime strlcpy strsignal symlinkat sync \ sysconf tcgetpgrp tcsetpgrp tempnam timegm times tmpfile \ - tmpnam tmpnam_r truncate ttyname umask uname unlinkat unlockpt utimensat utimes vfork \ + tmpnam tmpnam_r truncate ttyname_r umask uname unlinkat unlockpt utimensat utimes vfork \ wait wait3 wait4 waitid waitpid wcscoll wcsftime wcsxfrm wmemcmp writev \ ]) @@ -7520,19 +7540,19 @@ _RESTORE_VAR([CPPFLAGS]) # gh-59705: Maximum length in bytes of a thread name case "$ac_sys_system" in - Linux*) PYTHREAD_NAME_MAXLEN=15;; # Linux and Android - SunOS*) 
PYTHREAD_NAME_MAXLEN=31;; - NetBSD*) PYTHREAD_NAME_MAXLEN=31;; - Darwin) PYTHREAD_NAME_MAXLEN=63;; - iOS) PYTHREAD_NAME_MAXLEN=63;; - FreeBSD*) PYTHREAD_NAME_MAXLEN=98;; - *) PYTHREAD_NAME_MAXLEN=;; + Linux*) _PYTHREAD_NAME_MAXLEN=15;; # Linux and Android + SunOS*) _PYTHREAD_NAME_MAXLEN=31;; + NetBSD*) _PYTHREAD_NAME_MAXLEN=31;; + Darwin) _PYTHREAD_NAME_MAXLEN=63;; + iOS) _PYTHREAD_NAME_MAXLEN=63;; + FreeBSD*) _PYTHREAD_NAME_MAXLEN=98;; + *) _PYTHREAD_NAME_MAXLEN=;; esac -if test -n "$PYTHREAD_NAME_MAXLEN"; then - AC_DEFINE_UNQUOTED([PYTHREAD_NAME_MAXLEN], [$PYTHREAD_NAME_MAXLEN], +if test -n "$_PYTHREAD_NAME_MAXLEN"; then + AC_DEFINE_UNQUOTED([_PYTHREAD_NAME_MAXLEN], [$_PYTHREAD_NAME_MAXLEN], [Maximum length in bytes of a thread name]) fi -AC_SUBST([PYTHREAD_NAME_MAXLEN]) +AC_SUBST([_PYTHREAD_NAME_MAXLEN]) # stdlib diff --git a/iOS/testbed/__main__.py b/iOS/testbed/__main__.py index 068272835a5b95..b4499f5ac171a8 100644 --- a/iOS/testbed/__main__.py +++ b/iOS/testbed/__main__.py @@ -2,6 +2,7 @@ import asyncio import json import plistlib +import re import shutil import subprocess import sys @@ -12,6 +13,18 @@ DECODE_ARGS = ("UTF-8", "backslashreplace") +# The system log prefixes each line: +# 2025-01-17 16:14:29.090 Df iOSTestbed[23987:1fd393b4] (Python) ... +# 2025-01-17 16:14:29.090 E iOSTestbed[23987:1fd393b4] (Python) ... + +LOG_PREFIX_REGEX = re.compile( + r"^\d{4}-\d{2}-\d{2}" # YYYY-MM-DD + r"\s+\d+:\d{2}:\d{2}\.\d+" # HH:MM:SS.sss + r"\s+\w+" # Df/E + r"\s+iOSTestbed\[\d+:\w+\]" # Process/thread ID + r"\s+\(Python\)\s" # Logger name +) + # Work around a bug involving sys.exit and TaskGroups # (https://github.com/python/cpython/issues/101515). @@ -131,6 +144,8 @@ async def log_stream_task(initial_devices): ) as process: suppress_dupes = False while line := (await process.stdout.readline()).decode(*DECODE_ARGS): + # Strip the prefix from each log line + line = LOG_PREFIX_REGEX.sub("", line) # The iOS log streamer can sometimes lag; when it does, it outputs # a warning about messages being dropped... often multiple times. # Only print the first of these duplicated warnings. diff --git a/pyconfig.h.in b/pyconfig.h.in index d862966b7de4c8..30e55158bad4d6 100644 --- a/pyconfig.h.in +++ b/pyconfig.h.in @@ -286,6 +286,9 @@ /* Define if you have the 'dirfd' function or macro. */ #undef HAVE_DIRFD +/* Define to 1 if you have the 'dladdr' function. */ +#undef HAVE_DLADDR + /* Define to 1 if you have the header file. */ #undef HAVE_DLFCN_H @@ -1506,8 +1509,8 @@ /* Define to 1 if you have the 'truncate' function. */ #undef HAVE_TRUNCATE -/* Define to 1 if you have the 'ttyname' function. */ -#undef HAVE_TTYNAME +/* Define to 1 if you have the 'ttyname_r' function. */ +#undef HAVE_TTYNAME_R /* Define to 1 if you don't have 'tm_zone' but do have the external array 'tzname'. */ @@ -1659,9 +1662,6 @@ /* Define as the preferred size in bits of long digits */ #undef PYLONG_BITS_IN_DIGIT -/* Maximum length in bytes of a thread name */ -#undef PYTHREAD_NAME_MAXLEN - /* enabled builtin hash modules */ #undef PY_BUILTIN_HASHLIB_HASHES @@ -1977,6 +1977,9 @@ /* framework name */ #undef _PYTHONFRAMEWORK +/* Maximum length in bytes of a thread name */ +#undef _PYTHREAD_NAME_MAXLEN + /* Define to force use of thread-safe errno, h_errno, and other functions */ #undef _REENTRANT
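
For the iOS/testbed/__main__.py change above, the new `LOG_PREFIX_REGEX` strips the system log prefix (date, time, level, process/thread ID, and logger name) from every streamed line before the duplicate-warning suppression runs. A standalone sketch of that behaviour, reusing the same pattern on a made-up log line:

```python
import re

# Same pattern as LOG_PREFIX_REGEX in iOS/testbed/__main__.py in this patch.
LOG_PREFIX_REGEX = re.compile(
    r"^\d{4}-\d{2}-\d{2}"        # YYYY-MM-DD
    r"\s+\d+:\d{2}:\d{2}\.\d+"   # HH:MM:SS.sss
    r"\s+\w+"                    # Df/E
    r"\s+iOSTestbed\[\d+:\w+\]"  # Process/thread ID
    r"\s+\(Python\)\s"           # Logger name
)

# Hypothetical line in the format shown in the testbed comment.
line = "2025-01-17 16:14:29.090 Df iOSTestbed[23987:1fd393b4] (Python) test_foo passed"
print(LOG_PREFIX_REGEX.sub("", line))  # prints: test_foo passed
```
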